Example #1
0
def configure(machobj, output_dir, macros_format, compiler, mpilib, debug,
              sysos, unit_testing=False):
    """Add Macros, Depends, and env_mach_specific files to a directory.

    Arguments:
    machobj - Machines object for this machine.
    output_dir - Directory in which to place output.
    macros_format - Container containing the string 'Makefile' to produce
                    Makefile Macros output, and/or 'CMake' for CMake output.
    compiler - String containing the compiler vendor to configure for.
    mpilib - String containing the MPI implementation to configure for.
    debug - Boolean specifying whether debugging options are enabled.
    sysos - String containing the operating system of the target machine.
    unit_testing - Boolean specifying whether we're running unit tests (as
                   opposed to a system run).
    """
    # Macros generation.
    suffixes = {'Makefile': 'make', 'CMake': 'cmake'}
    macro_maker = Compilers(machobj, compiler=compiler, mpilib=mpilib)
    for form in macros_format:
        out_file_name = os.path.join(output_dir,"Macros."+suffixes[form])
        macro_maker.write_macros_file(macros_file=out_file_name, output_format=suffixes[form])

    _copy_depends_files(machobj.get_machine_name(), machobj.machines_dir, output_dir, compiler)
    _generate_env_mach_specific(output_dir, machobj, compiler, mpilib,
                                debug, sysos, unit_testing)
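
A minimal usage sketch for this version of configure, assuming Machines and configure are imported from their usual CIME modules (the call mirrors the one made in Example #8; the output path is hypothetical):

# Usage sketch (assumptions noted above); mirrors the call in Example #8.
machobj = Machines(machine="mymachine")
configure(machobj,
          "/path/to/output_dir",          # hypothetical directory
          ["Makefile", "CMake"],
          machobj.get_default_compiler(),
          machobj.get_default_MPIlib(),
          False,                          # debug
          machobj.get_value("OS"),        # sysos
          unit_testing=False)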
Example #2
0
def configure(machobj,
              output_dir,
              macros_format,
              compiler,
              mpilib,
              debug,
              sysos,
              unit_testing=False):
    """Add Macros, Depends, and env_mach_specific files to a directory.

    Arguments:
    machobj - Machines object for this machine.
    output_dir - Directory in which to place output.
    macros_format - Container containing the string 'Makefile' to produce
                    Makefile Macros output, and/or 'CMake' for CMake output.
    compiler - String containing the compiler vendor to configure for.
    mpilib - String containing the MPI implementation to configure for.
    debug - Boolean specifying whether debugging options are enabled.
    sysos - String containing the operating system of the target machine.
    unit_testing - Boolean specifying whether we're running unit tests (as
                   opposed to a system run).
    """
    # Macros generation.
    suffixes = {'Makefile': 'make', 'CMake': 'cmake'}
    macro_maker = Compilers(machobj, compiler=compiler, mpilib=mpilib)
    for form in macros_format:
        out_file_name = os.path.join(output_dir, "Macros." + suffixes[form])
        macro_maker.write_macros_file(macros_file=out_file_name,
                                      output_format=suffixes[form])

    _copy_depends_files(machobj.get_machine_name(), machobj.machines_dir,
                        output_dir, compiler)
    _generate_env_mach_specific(output_dir, machobj, compiler, mpilib, debug,
                                sysos, unit_testing)
Example #3
0
    def _has_unit_test_support(self):
        if self.TEST_COMPILER is None:
            default_compiler = self.MACHINE.get_default_compiler()
            compiler = Compilers(self.MACHINE, compiler=default_compiler)
        else:
            compiler = Compilers(self.MACHINE, compiler=self.TEST_COMPILER)
        attrs = {"MPILIB": "mpi-serial", "compile_threaded": "FALSE"}
        pfunit_path = compiler.get_optional_compiler_node("PFUNIT_PATH",
                                                          attributes=attrs)
        if pfunit_path is None:
            return False
        else:
            return True
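
A hedged sketch of how a test class could use this helper, following the skipTest pattern from Example #7 (this setUp is illustrative and not taken from the project):

    def setUp(self):
        super().setUp()
        # Skip pFUnit-dependent tests when no PFUNIT_PATH is configured
        # for the chosen compiler (illustrative use of the helper above).
        if not self._has_unit_test_support():
            self.skipTest("pFUnit is not configured for this machine/compiler")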
Example #4
0
    def test_script_is_callable(self):
        """The test script can be called on valid output without dying."""
        # This is really more a smoke test of this script than anything else.
        maker = Compilers(test_utils.MockMachines("mymachine", "SomeOS"),
                          version=2.0)
        test_xml = test_utils._wrap_config_compilers_xml(
            "<compiler><SUPPORTS_CXX>FALSE</SUPPORTS_CXX></compiler>")
        test_utils.get_macros(maker, test_xml, "Makefile")
Example #5
0
    def test_script_rejects_bad_build_system(self):
        """The macro writer rejects a bad build system string."""
        maker = Compilers(test_utils.MockMachines("mymachine", "SomeOS"),
                          version=2.0)
        bad_string = "argle-bargle."
        with self.assertRaisesRegex(
                utils.CIMEError,
                "Unrecognized build system provided to write_macros: " +
                bad_string,
        ):
            test_utils.get_macros(maker, "This string is irrelevant.",
                                  bad_string)
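
One detail worth noting: assertRaisesRegex treats its second argument as a regular expression, so the trailing '.' in bad_string matches any character rather than a literal period. A slightly more defensive variant (a sketch, not part of the project's test suite) escapes the injected string:

    def test_script_rejects_bad_build_system_escaped(self):
        """Sketch: same check as above, with the regex metacharacter escaped."""
        import re

        maker = Compilers(test_utils.MockMachines("mymachine", "SomeOS"),
                          version=2.0)
        bad_string = "argle-bargle."
        with self.assertRaisesRegex(
                utils.CIMEError,
                "Unrecognized build system provided to write_macros: " +
                re.escape(bad_string),
        ):
            test_utils.get_macros(maker, "This string is irrelevant.",
                                  bad_string)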
Example #6
0
    def setUp(self):
        self.test_os = "SomeOS"
        self.test_machine = "mymachine"
        self.test_compiler = (self.MACHINE.get_default_compiler()
                              if self.TEST_COMPILER is None else
                              self.TEST_COMPILER)
        self.test_mpilib = (self.MACHINE.get_default_MPIlib(
            attributes={"compiler": self.test_compiler})
                            if self.TEST_MPILIB is None else self.TEST_MPILIB)

        self._maker = Compilers(test_utils.MockMachines(
            self.test_machine, self.test_os),
                                version=2.0)

        super().setUp()
Example #7
0
    def setUp(self):
        super().setUp()

        if "CIME_NO_CMAKE_MACRO" not in os.environ:
            self.skipTest("Skipping test of old macro system")

        self.test_os = "SomeOS"
        self.test_machine = "mymachine"
        self.test_compiler = (self.MACHINE.get_default_compiler()
                              if self.TEST_COMPILER is None else
                              self.TEST_COMPILER)
        self.test_mpilib = (self.MACHINE.get_default_MPIlib(
            attributes={"compiler": self.test_compiler})
                            if self.TEST_MPILIB is None else self.TEST_MPILIB)

        self._maker = Compilers(test_utils.MockMachines(
            self.test_machine, self.test_os),
                                version=2.0)
Example #8
0
def _main():
    output, build_dir, build_optimized, clean,\
        cmake_args, compiler, enable_genf90, machine, machines_dir,\
        make_j, use_mpi, mpilib, mpirun_command, test_spec_dir, ctest_args,\
        use_openmp, xml_test_list, verbose \
        = parse_command_line(sys.argv)

    #=================================================
    # Find directory and file paths.
    #=================================================
    suite_specs = []
    # TODO: this violates the CIME policy against direct XML access;
    # it should be moved to CIME/XML
    if xml_test_list is not None:
        test_xml_tree = ElementTree()
        test_xml_tree.parse(xml_test_list)
        known_paths = {
            "here": os.path.abspath(os.path.dirname(xml_test_list)),
        }
        suite_specs.extend(suites_from_xml(test_xml_tree, known_paths))
    if test_spec_dir is not None:
        suite_specs.append(
            TestSuiteSpec("__command_line_test__", ["__command_line_test__"],
                          [os.path.abspath(test_spec_dir)]))

    if machines_dir is not None:
        machines_file = os.path.join(machines_dir, "config_machines.xml")
        machobj = Machines(infile=machines_file, machine=machine)
    else:
        machobj = Machines(machine=machine)

    # Create build directory if necessary.
    build_dir = os.path.abspath(build_dir)

    if not os.path.isdir(build_dir):
        os.mkdir(build_dir)

    # Switch to the build directory.
    os.chdir(build_dir)
    if clean:
        pwd_contents = os.listdir(os.getcwd())
        # Clear CMake cache.
        for file_ in pwd_contents:
            if file_ in ("Macros.cmake", "env_mach_specific.xml") \
                    or file_.startswith('Depends') or file_.startswith(".env_mach_specific"):
                os.remove(file_)

    #=================================================
    # Functions to perform various stages of build.
    #=================================================

    if not use_mpi:
        mpilib = "mpi-serial"
    elif mpilib is None:
        mpilib = machobj.get_default_MPIlib()
        logger.info("Using mpilib: {}".format(mpilib))

    if compiler is None:
        compiler = machobj.get_default_compiler()
        logger.info("Compiler is {}".format(compiler))

    compilerobj = Compilers(machobj, compiler=compiler, mpilib=mpilib)

    pfunit_path = find_pfunit(compilerobj,
                              mpilib=mpilib,
                              use_openmp=use_openmp)

    debug = not build_optimized
    os_ = machobj.get_value("OS")

    # Create the environment and the Macros.cmake file
    configure(machobj,
              build_dir, ["CMake"],
              compiler,
              mpilib,
              debug,
              os_,
              unit_testing=True)
    machspecific = EnvMachSpecific(build_dir, unit_testing=True)

    fake_case = FakeCase(compiler, mpilib, debug)
    machspecific.load_env(fake_case)
    os.environ["OS"] = os_
    os.environ["COMPILER"] = compiler
    os.environ["DEBUG"] = stringify_bool(debug)
    os.environ["MPILIB"] = mpilib
    if use_openmp:
        os.environ["compile_threaded"] = "true"
    else:
        os.environ["compile_threaded"] = "false"

    os.environ["UNIT_TEST_HOST"] = socket.gethostname()
    if "NETCDF_PATH" in os.environ and not "NETCDF" in os.environ:
        # The CMake Netcdf find utility that we use (from pio2) seems to key off
        # of the environment variable NETCDF, but not NETCDF_PATH
        logger.info("Setting NETCDF environment variable: {}".format(
            os.environ["NETCDF_PATH"]))
        os.environ["NETCDF"] = os.environ["NETCDF_PATH"]

    if not use_mpi:
        mpirun_command = ""
    elif mpirun_command is None:
        mpi_attribs = {
            "compiler": compiler,
            "mpilib": mpilib,
            "threaded": use_openmp,
            "unit_testing": True
        }

        # We can get away with specifying case=None since we're using exe_only=True
        mpirun_command, _ = machspecific.get_mpirun(None,
                                                    mpi_attribs,
                                                    None,
                                                    exe_only=True)
        mpirun_command = machspecific.get_resolved_value(mpirun_command)
        logger.info("mpirun command is '{}'".format(mpirun_command))


    #=================================================
    # Run tests.
    #=================================================

    for spec in suite_specs:
        os.chdir(build_dir)
        if os.path.isdir(spec.name):
            if clean:
                rmtree(spec.name)

        if not os.path.isdir(spec.name):
            os.mkdir(spec.name)

        for label, directory in spec:
            os.chdir(os.path.join(build_dir, spec.name))
            if not os.path.isdir(label):
                os.mkdir(label)

            os.chdir(label)

            name = spec.name + "/" + label

            if not os.path.islink("Macros.cmake"):
                os.symlink(os.path.join(build_dir, "Macros.cmake"),
                           "Macros.cmake")
            use_mpiserial = not use_mpi
            cmake_stage(name,
                        directory,
                        build_optimized,
                        use_mpiserial,
                        mpirun_command,
                        output,
                        pfunit_path,
                        verbose=verbose,
                        enable_genf90=enable_genf90,
                        cmake_args=cmake_args)
            make_stage(name, output, make_j, clean=clean, verbose=verbose)

    for spec in suite_specs:
        os.chdir(os.path.join(build_dir, spec.name))
        for label, directory in spec:

            name = spec.name + "/" + label

            output.print_header("Running CTest tests for " + name + ".")

            ctest_command = ["ctest", "--output-on-failure"]

            if verbose:
                ctest_command.append("-VV")

            if ctest_args is not None:
                ctest_command.extend(ctest_args.split(" "))

            run_cmd_no_fail(" ".join(ctest_command),
                            from_dir=label,
                            arg_stdout=None,
                            arg_stderr=subprocess.STDOUT)
Example #9
0
        msg = "case.setup clean complete"
        append_status(msg, caseroot=caseroot, sfile="CaseStatus")

    if not clean:
        models = case.get_values("COMP_CLASSES")

        mach, compiler, debug, mpilib = \
            case.get_value("MACH"), case.get_value("COMPILER"), case.get_value("DEBUG"), case.get_value("MPILIB")
        expect(mach is not None, "xml variable MACH is not set")

        # Create Macros file only if it does not exist
        if not os.path.exists("Macros"):
            logger.debug("Creating Macros file for %s" % mach)
            compilers = Compilers(compiler=compiler,
                                  machine=mach,
                                  os_=case.get_value("OS"),
                                  mpilib=mpilib)
            compilers.write_macros_file()
        else:
            logger.debug("Macros script already created ...skipping")

        # Set tasks to 1 if mpi-serial library
        if mpilib == "mpi-serial":
            for vid, value in case:
                if vid.startswith("NTASKS_") and value != 1:
                    case.set_value(vid, 1)

        # Check ninst.
        # In CIME there can be multiple instances of each component model (an
        # ensemble); NINST is the number of instances of that component.
        # Save ninst in a dict to use later in apply_user_mods
Example #10
0
    def test_script_rejects_bad_xml(self):
        """The macro writer rejects input that's not valid XML."""
        maker = Compilers(test_utils.MockMachines("mymachine", "SomeOS"),
                          version=2.0)
        with self.assertRaises(ParseError):
            test_utils.get_macros(maker, "This is not valid XML.", "Makefile")
Example #11
0
def configure(machobj,
              output_dir,
              macros_format,
              compiler,
              mpilib,
              debug,
              comp_interface,
              sysos,
              unit_testing=False,
              noenv=False,
              threaded=False,
              extra_machines_dir=None):
    """Add Macros, Depends, and env_mach_specific files to a directory.

    Arguments:
    machobj - Machines object for this machine.
    output_dir - Directory in which to place output.
    macros_format - Container containing the string 'Makefile' to produce
                    Makefile Macros output, and/or 'CMake' for CMake output.
    compiler - String containing the compiler vendor to configure for.
    mpilib - String containing the MPI implementation to configure for.
    debug - Boolean specifying whether debugging options are enabled.
    comp_interface - String containing the component interface (driver) to
                     configure for.
    sysos - String containing the operating system of the target machine.
    unit_testing - Boolean specifying whether we're running unit tests (as
                   opposed to a system run).
    extra_machines_dir - String giving path to an additional directory that will be
                         searched for a config_compilers.xml file.
    """
    # Macros generation.
    suffixes = {'Makefile': 'make', 'CMake': 'cmake'}

    new_cmake_macros_dir = Files(
        comp_interface=comp_interface).get_value("CMAKE_MACROS_DIR")
    macro_maker = None
    for form in macros_format:

        if form == "CMake" and new_cmake_macros_dir is not None and os.path.exists(
                new_cmake_macros_dir
        ) and not "CIME_NO_CMAKE_MACRO" in os.environ:
            if not os.path.isfile(os.path.join(output_dir, "Macros.cmake")):
                safe_copy(os.path.join(new_cmake_macros_dir, "Macros.cmake"),
                          output_dir)
            if not os.path.exists(os.path.join(output_dir, "cmake_macros")):
                shutil.copytree(new_cmake_macros_dir,
                                os.path.join(output_dir, "cmake_macros"))

        else:
            logger.warning("Using deprecated CIME makefile generators")
            if macro_maker is None:
                macro_maker = Compilers(machobj,
                                        compiler=compiler,
                                        mpilib=mpilib,
                                        extra_machines_dir=extra_machines_dir)

            out_file_name = os.path.join(output_dir,
                                         "Macros." + suffixes[form])
            macro_maker.write_macros_file(macros_file=out_file_name,
                                          output_format=suffixes[form])

    copy_depends_files(machobj.get_machine_name(), machobj.machines_dir,
                       output_dir, compiler)
    generate_env_mach_specific(output_dir,
                               machobj,
                               compiler,
                               mpilib,
                               debug,
                               comp_interface,
                               sysos,
                               unit_testing,
                               threaded,
                               noenv=noenv)
Example #12
0
def _case_setup_impl(case, caseroot, casebaseid, clean=False, test_mode=False, reset=False):
###############################################################################
    os.chdir(caseroot)
    msg = "case.setup starting"
    append_status(msg, caseroot=caseroot, sfile="CaseStatus")

    cimeroot = os.environ["CIMEROOT"]

    # Check that $DIN_LOC_ROOT exists, and abort unless this is a namelist-compare (SBN) test
    din_loc_root = case.get_value("DIN_LOC_ROOT")
    testcase     = case.get_value("TESTCASE")
    expect(not (not os.path.isdir(din_loc_root) and testcase != "SBN"),
           "inputdata root is not a directory: \"$din_loc_root\" ")

    # Check that user-defined settings are specified before expanding variables
    for vid, value in case:
        expect(not (type(value) is str and "USERDEFINED_required_build" in value),
               "Parameter '%s' must be defined" % vid)

    # Create batch script
    if reset or clean:
        # Clean batch script

        backup_dir = "PESetupHist/b.%s" % time.strftime("%y%m%d-%H%M%S")
        if not os.path.isdir(backup_dir):
            os.makedirs(backup_dir)

        # back up relevant files
        for fileglob in ["case.run", "env_build.xml", "env_mach_pes.xml", "Macros*"]:
            for filename in glob.glob(fileglob):
                shutil.copy(filename, backup_dir)
        if os.path.exists("case.run"):
            os.remove("case.run")

        # only do the following if we are NOT in test mode
        if not test_mode:
            # rebuild the models (even on restart)
            case.set_value("BUILD_COMPLETE", False)

            # backup and then clean test script
            if os.path.exists("case.test"):
                shutil.copy("case.test", backup_dir)
                os.remove("case.test")
                logger.info("Successfully cleaned test script case.test")

            if os.path.exists("case.testdriver"):
                shutil.copy("case.testdriver", backup_dir)
                os.remove("case.testdriver")
                logger.info("Successfully cleaned test script case.testdriver")

        logger.info("Successfully cleaned batch script case.run")

        logger.info("Successfully cleaned batch script case.run")
        logger.info("Some files have been saved to %s" % backup_dir)

        msg = "case.setup clean complete"
        append_status(msg, caseroot=caseroot, sfile="CaseStatus")

    if not clean:
        drv_comp = Component()
        models = drv_comp.get_valid_model_components()
        models.remove("DRV")

        mach, compiler, debug, mpilib = \
            case.get_value("MACH"), case.get_value("COMPILER"), case.get_value("DEBUG"), case.get_value("MPILIB")
        expect(mach is not None, "xml variable MACH is not set")

        # Create Macros file only if it does not exist
        if not os.path.exists("Macros"):
            logger.debug("Creating Macros file for %s" % mach)
            compilers = Compilers(compiler=compiler, machine=mach, os_=case.get_value("OS"), mpilib=mpilib)
            compilers.write_macros_file()
        else:
            logger.debug("Macros script already created ...skipping")

        # Set tasks to 1 if mpi-serial library
        if mpilib == "mpi-serial":
            for vid, value in case:
                if vid.startswith("NTASKS_") and value != 1:
                    case.set_value(vid, 1)

        # Check ninst.
        # In CIME there can be multiple instances of each component model (an
        # ensemble); NINST is the number of instances of that component.
        # Save ninst in a dict to use later in apply_user_mods
        ninst = dict()
        for comp in models:
            comp_model = case.get_value("COMP_%s" % comp)
            ninst[comp_model]  = case.get_value("NINST_%s" % comp)
            ntasks = case.get_value("NTASKS_%s" % comp)
            if ninst[comp_model] > ntasks:
                if ntasks == 1:
                    case.set_value("NTASKS_%s" % comp, ninst[comp_model])
                else:
                    expect(False, "NINST_%s value %d greater than NTASKS_%s %d" % (comp, ninst[comp_model], comp, ntasks))

        expect(not (case.get_value("BUILD_THREADED") and compiler == "nag"),
               "it is not possible to run with OpenMP if using the NAG Fortran compiler")

        if os.path.exists("case.run"):
            logger.info("Machine/Decomp/Pes configuration has already been done ...skipping")
        else:
            _check_pelayouts_require_rebuild(case, models)

            if os.path.exists("LockedFiles/env_build.xml"):
                os.remove("LockedFiles/env_build.xml")

            case.flush()
            check_lockedfiles()

            tm = TaskMaker(case)
            mtpn = case.get_value("MAX_TASKS_PER_NODE")
            pespn = case.get_value("PES_PER_NODE")
            # This is hardcoded because on yellowstone by default we
            # run with 15 pes per node
            # but pay for 16 pes per node.  See github issue #518
            if case.get_value("MACH") == "yellowstone":
                pespn = 16
            pestot = tm.totaltasks
            if mtpn > pespn:
                pestot = pestot * (mtpn // pespn)
                case.set_value("COST_PES", tm.num_nodes*pespn)
            else:
                # reset cost_pes to totalpes
                case.set_value("COST_PES", 0)

            case.set_value("TOTALPES", pestot)

            # Compute cost based on PE count
            pval = 1
            pcnt = 0
            while pval < pestot:
                pval *= 2
                pcnt += 6 # (scaling like sqrt(6/10))
            pcost = 3 - pcnt / 10 # (3 is 64 with 6)

            # Compute cost based on DEBUG
            dcost = 3 if debug else 0

            # Compute cost based on run length
            # For simplicity, we use a heuristic just based on STOP_OPTION (not considering
            # STOP_N), and only deal with options longer than ndays
            lcost = 0
            if "nmonth" in case.get_value("STOP_OPTION"):
                # N months costs 30x as much as N days; since cost is based on log-base-2, add 5
                lcost = 5
            elif "nyear" in case.get_value("STOP_OPTION"):
                # N years costs 365x as much as N days; since cost is based on log-base-2, add 9
                lcost = 9

            estcost = pcost + dcost + lcost
            for cost in ["CCSM_CCOST", "CCSM_GCOST", "CCSM_TCOST", "CCSM_CCOST"]:
                estcost += case.get_value(cost)

            case.set_value("CCSM_PCOST", pcost)
            case.set_value("CCSM_ESTCOST", estcost)

            # create batch file
            logger.info("Creating batch script case.run")

            # Use BatchFactory to get the appropriate instance of a BatchMaker,
            # and use it to create our batch scripts
            env_batch = case.get_env("batch")
            for job in env_batch.get_jobs():
                input_batch_script  = os.path.join(case.get_value("MACHDIR"), env_batch.get_value('template', subgroup=job))
                if job == "case.test" and testcase is not None and not test_mode:
                    logger.info("Writing %s script" % job)
                    testscript = os.path.join(cimeroot, "scripts", "Testing", "Testcases", "%s_script" % testcase)
                    # Short term fix to be removed when csh tests are removed
                    if not os.path.exists(testscript):
                        env_batch.make_batch_script(input_batch_script, job, case)
                elif job != "case.test":
                    logger.info("Writing %s script" % job)
                    env_batch.make_batch_script(input_batch_script, job, case)

            # Make a copy of env_mach_pes.xml in order to be able
            # to check that it does not change once case.setup is invoked
            logger.info("Locking file env_mach_pes.xml")
            case.flush()
            shutil.copy("env_mach_pes.xml", "LockedFiles")

        # Create user_nl files for the required number of instances
        if not os.path.exists("user_nl_cpl"):
            logger.info("Creating user_nl_xxx files for components and cpl")
        # loop over models
        for model in models:
            comp = case.get_value("COMP_%s" % model)
            logger.info("Building %s usernl files"%model)
            _build_usernl_files(case, model, comp)
            if comp == "cism":
                run_cmd_no_fail("%s/../components/cism/cime_config/cism.template %s" % (cimeroot, caseroot))

        _build_usernl_files(case, "drv", "cpl")

        user_mods_path = case.get_value("USER_MODS_FULLPATH")
        if user_mods_path is not None:
            apply_user_mods(caseroot, user_mods_path=user_mods_path, ninst=ninst)
        elif case.get_value("TEST"):
            test_mods = parse_test_name(casebaseid)[6]
            if test_mods is not None:
                user_mods_path = os.path.join(case.get_value("TESTS_MODS_DIR"), test_mods)
                apply_user_mods(caseroot, user_mods_path=user_mods_path, ninst=ninst)


        # Run preview namelists for scripts
        logger.info("preview_namelists")
        preview_namelists(case)

        logger.info("See ./CaseDoc for component namelists")
        logger.info("If an old case build already exists, might want to run \'case.build --clean\' before building")

        # Create test script if appropriate
        # Short term fix to be removed when csh tests are removed
        if os.path.exists("env_test.xml"):
            if not os.path.exists("case.test"):
                logger.info("Starting testcase.setup")
                run_cmd_no_fail("./testcase.setup -caseroot %s" % caseroot)
                logger.info("Finished testcase.setup")

        msg = "case.setup complete"
        append_status(msg, caseroot=caseroot, sfile="CaseStatus")

        # Record env information
        env_module = case.get_env("mach_specific")
        env_module.make_env_mach_specific_file(compiler, debug, mpilib, "sh")
        env_module.make_env_mach_specific_file(compiler, debug, mpilib, "csh")
        with open("software_environment.txt", "w") as f:
            f.write(env_module.list_modules())
        run_cmd_no_fail("echo -e '\n' >> software_environment.txt && \
                         env >> software_environment.txt")
Example #13
0
def configure(
    machobj,
    output_dir,
    macros_format,
    compiler,
    mpilib,
    debug,
    comp_interface,
    sysos,
    unit_testing=False,
    noenv=False,
    threaded=False,
    extra_machines_dir=None,
):
    """Add Macros, Depends, and env_mach_specific files to a directory.

    Arguments:
    machobj - Machines object for this machine.
    output_dir - Directory in which to place output.
    macros_format - Container containing the string 'Makefile' to produce
                    Makefile Macros output, and/or 'CMake' for CMake output.
    compiler - String containing the compiler vendor to configure for.
    mpilib - String containing the MPI implementation to configure for.
    debug - Boolean specifying whether debugging options are enabled.
    comp_interface - String containing the component interface (driver) to
                     configure for.
    sysos - String containing the operating system of the target machine.
    unit_testing - Boolean specifying whether we're running unit tests (as
                   opposed to a system run).
    extra_machines_dir - String giving path to an additional directory that will be
                         searched for a config_compilers.xml file.
    """
    # Macros generation.
    suffixes = {"Makefile": "make", "CMake": "cmake"}

    new_cmake_macros_dir = Files(
        comp_interface=comp_interface).get_value("CMAKE_MACROS_DIR")
    macro_maker = None
    for form in macros_format:

        if (new_cmake_macros_dir is not None
                and os.path.exists(new_cmake_macros_dir)
                and not "CIME_NO_CMAKE_MACRO" in os.environ):

            if not os.path.isfile(os.path.join(output_dir, "Macros.cmake")):
                safe_copy(os.path.join(new_cmake_macros_dir, "Macros.cmake"),
                          output_dir)
            if not os.path.exists(os.path.join(output_dir, "cmake_macros")):
                shutil.copytree(new_cmake_macros_dir,
                                os.path.join(output_dir, "cmake_macros"))

            # Grab macros from extra machine dir if it was provided
            if extra_machines_dir:
                extra_cmake_macros = glob.glob(
                    "{}/cmake_macros/*.cmake".format(extra_machines_dir))
                for extra_cmake_macro in extra_cmake_macros:
                    safe_copy(extra_cmake_macro, new_cmake_macros_dir)

            if form == "Makefile":
                # Use the cmake macros to generate the make macros
                cmake_args = " -DOS={} -DMACH={} -DCOMPILER={} -DDEBUG={} -DMPILIB={} -Dcompile_threaded={} -DCASEROOT={}".format(
                    sysos,
                    machobj.get_machine_name(),
                    compiler,
                    stringify_bool(debug),
                    mpilib,
                    stringify_bool(threaded),
                    output_dir,
                )

                with CmakeTmpBuildDir(macroloc=output_dir) as cmaketmp:
                    output = cmaketmp.get_makefile_vars(cmake_args=cmake_args)

                with open(os.path.join(output_dir, "Macros.make"), "w") as fd:
                    fd.write(output)

        else:
            logger.warning("Using deprecated CIME makefile generators")
            if macro_maker is None:
                macro_maker = Compilers(
                    machobj,
                    compiler=compiler,
                    mpilib=mpilib,
                    extra_machines_dir=extra_machines_dir,
                )

            out_file_name = os.path.join(output_dir,
                                         "Macros." + suffixes[form])
            macro_maker.write_macros_file(macros_file=out_file_name,
                                          output_format=suffixes[form])

    copy_depends_files(machobj.get_machine_name(), machobj.machines_dir,
                       output_dir, compiler)
    generate_env_mach_specific(
        output_dir,
        machobj,
        compiler,
        mpilib,
        debug,
        comp_interface,
        sysos,
        unit_testing,
        threaded,
        noenv=noenv,
    )
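
A parallel usage sketch for this newer signature (same assumptions as the sketch after Example #1; 'mct' is an illustrative comp_interface value and the output path is hypothetical):

# Usage sketch for the extended signature.
machobj = Machines(machine="mymachine")
configure(machobj,
          "/path/to/output_dir",          # hypothetical directory
          ["CMake"],
          machobj.get_default_compiler(),
          machobj.get_default_MPIlib(),
          False,                          # debug
          "mct",                          # comp_interface (illustrative)
          machobj.get_value("OS"),        # sysos
          unit_testing=True)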
Example #14
0
def _case_setup_impl(case, caseroot, casebaseid, clean=False, test_mode=False, reset=False):
###############################################################################
    os.chdir(caseroot)
    msg = "case.setup starting"
    append_status(msg, caseroot=caseroot, sfile="CaseStatus")

    cimeroot = get_cime_root(case)

    # Check that $DIN_LOC_ROOT exists, and abort unless this is a namelist-compare (SBN) test
    din_loc_root = case.get_value("DIN_LOC_ROOT")
    testcase     = case.get_value("TESTCASE")
    expect(not (not os.path.isdir(din_loc_root) and testcase != "SBN"),
           "inputdata root is not a directory: \"$din_loc_root\" ")

    # Check that user-defined settings are specified before expanding variables
    for vid, value in case:
        expect(not (type(value) is str and "USERDEFINED_required_build" in value),
               "Parameter '%s' must be defined" % vid)

    # Create batch script
    if reset or clean:
        # Clean batch script

        backup_dir = "PESetupHist/b.%s" % time.strftime("%y%m%d-%H%M%S")
        if not os.path.isdir(backup_dir):
            os.makedirs(backup_dir)

        # back up relevant files
        for fileglob in ["case.run", "env_build.xml", "env_mach_pes.xml", "Macros*"]:
            for filename in glob.glob(fileglob):
                shutil.copy(filename, backup_dir)
        if os.path.exists("case.run"):
            os.remove("case.run")

        # only do the following if we are NOT in test mode
        if not test_mode:
            # rebuild the models (even on restart)
            case.set_value("BUILD_COMPLETE", False)

            # backup and then clean test script
            if os.path.exists("case.test"):
                shutil.copy("case.test", backup_dir)
                os.remove("case.test")
                logger.info("Successfully cleaned test script case.test")

            if os.path.exists("case.testdriver"):
                shutil.copy("case.testdriver", backup_dir)
                os.remove("case.testdriver")
                logger.info("Successfully cleaned test script case.testdriver")

        logger.info("Successfully cleaned batch script case.run")

        logger.info("Successfully cleaned batch script case.run")
        logger.info("Some files have been saved to %s" % backup_dir)

        msg = "case.setup clean complete"
        append_status(msg, caseroot=caseroot, sfile="CaseStatus")

    if not clean:
        models = case.get_values("COMP_CLASSES")

        mach, compiler, debug, mpilib = \
            case.get_value("MACH"), case.get_value("COMPILER"), case.get_value("DEBUG"), case.get_value("MPILIB")
        expect(mach is not None, "xml variable MACH is not set")

        # Create Macros file only if it does not exist
        if not os.path.exists("Macros"):
            logger.debug("Creating Macros file for %s" % mach)
            compilers = Compilers(compiler=compiler, machine=mach, os_=case.get_value("OS"), mpilib=mpilib)
            compilers.write_macros_file()
        else:
            logger.debug("Macros script already created ...skipping")

        # Set tasks to 1 if mpi-serial library
        if mpilib == "mpi-serial":
            for vid, value in case:
                if vid.startswith("NTASKS_") and value != 1:
                    case.set_value(vid, 1)

        # Check ninst.
        # In CIME there can be multiple instances of each component model (an
        # ensemble); NINST is the number of instances of that component.
        # Save ninst in a dict to use later in apply_user_mods
        ninst = dict()
        for comp in models:
            if comp == "DRV":
                continue
            comp_model = case.get_value("COMP_%s" % comp)
            ninst[comp_model]  = case.get_value("NINST_%s" % comp)
            ntasks = case.get_value("NTASKS_%s" % comp)
            if ninst[comp_model] > ntasks:
                if ntasks == 1:
                    case.set_value("NTASKS_%s" % comp, ninst[comp_model])
                else:
                    expect(False, "NINST_%s value %d greater than NTASKS_%s %d" % (comp, ninst[comp_model], comp, ntasks))

        if os.path.exists("case.run"):
            logger.info("Machine/Decomp/Pes configuration has already been done ...skipping")
        else:
            _check_pelayouts_require_rebuild(case, models)

            if os.path.exists("LockedFiles/env_build.xml"):
                os.remove("LockedFiles/env_build.xml")

            case.flush()
            check_lockedfiles()
            env_mach_pes = case.get_env("mach_pes")
            pestot = env_mach_pes.get_total_tasks(models)
            logger.debug("at update TOTALPES = %s"%pestot)
            case.set_value("TOTALPES", pestot)
            thread_count = env_mach_pes.get_max_thread_count(models)
            if thread_count > 1:
                case.set_value("BUILD_THREADED", True)

            expect(not (case.get_value("BUILD_THREADED")  and compiler == "nag"),
                   "it is not possible to run with OpenMP if using the NAG Fortran compiler")


            cost_pes = env_mach_pes.get_cost_pes(pestot, thread_count, machine=case.get_value("MACH"))
            case.set_value("COST_PES", cost_pes)

            # create batch file
            logger.info("Creating batch script case.run")

            # Use BatchFactory to get the appropriate instance of a BatchMaker,
            # and use it to create our batch scripts
            env_batch = case.get_env("batch")
            num_nodes = env_mach_pes.get_total_nodes(pestot, thread_count)
            tasks_per_node = env_mach_pes.get_tasks_per_node(pestot, thread_count)
            for job in env_batch.get_jobs():
                input_batch_script  = os.path.join(case.get_value("MACHDIR"), env_batch.get_value('template', subgroup=job))
                if job == "case.test" and testcase is not None and not test_mode:
                    logger.info("Writing %s script" % job)
                    testscript = os.path.join(cimeroot, "scripts", "Testing", "Testcases", "%s_script" % testcase)
                    # Short term fix to be removed when csh tests are removed
                    if not os.path.exists(testscript):
                        env_batch.make_batch_script(input_batch_script, job, case, pestot, tasks_per_node, num_nodes, thread_count)
                elif job != "case.test":
                    logger.info("Writing %s script from input template %s" % (job, input_batch_script))
                    env_batch.make_batch_script(input_batch_script, job, case, pestot, tasks_per_node, num_nodes, thread_count)

            # Make a copy of env_mach_pes.xml in order to be able
            # to check that it does not change once case.setup is invoked
            logger.info("Locking file env_mach_pes.xml")
            case.flush()
            logger.debug("at copy TOTALPES = %s"%case.get_value("TOTALPES"))
            shutil.copy("env_mach_pes.xml", "LockedFiles")

        # Create user_nl files for the required number of instances
        if not os.path.exists("user_nl_cpl"):
            logger.info("Creating user_nl_xxx files for components and cpl")
        # loop over models
        for model in models:
            comp = case.get_value("COMP_%s" % model)
            logger.info("Building %s usernl files"%model)
            _build_usernl_files(case, model, comp)
            if comp == "cism":
                run_cmd_no_fail("%s/../components/cism/cime_config/cism.template %s" % (cimeroot, caseroot))

        _build_usernl_files(case, "drv", "cpl")

        user_mods_path = case.get_value("USER_MODS_FULLPATH")
        if user_mods_path is not None:
            apply_user_mods(caseroot, user_mods_path=user_mods_path, ninst=ninst)
        elif case.get_value("TEST"):
            test_mods = parse_test_name(casebaseid)[6]
            if test_mods is not None:
                user_mods_path = os.path.join(case.get_value("TESTS_MODS_DIR"), test_mods)
                apply_user_mods(caseroot, user_mods_path=user_mods_path, ninst=ninst)


        # Run preview namelists for scripts
        logger.info("preview_namelists")
        preview_namelists(case)

        logger.info("See ./CaseDoc for component namelists")
        logger.info("If an old case build already exists, might want to run \'case.build --clean\' before building")

        # Create test script if appropriate
        # Short term fix to be removed when csh tests are removed
        if os.path.exists("env_test.xml"):
            if not os.path.exists("case.test"):
                logger.info("Starting testcase.setup")
                run_cmd_no_fail("./testcase.setup -caseroot %s" % caseroot)
                logger.info("Finished testcase.setup")

        msg = "case.setup complete"
        append_status(msg, caseroot=caseroot, sfile="CaseStatus")

        # Record env information
        env_module = case.get_env("mach_specific")
        env_module.make_env_mach_specific_file(compiler, debug, mpilib, "sh")
        env_module.make_env_mach_specific_file(compiler, debug, mpilib, "csh")
        with open("software_environment.txt", "w") as f:
            f.write(env_module.list_modules())
        run_cmd_no_fail("echo -e '\n' >> software_environment.txt && \
                         env >> software_environment.txt")