Exemplo n.º 1
0
 def get_default_compiler(self):
     """
     Return the compiler to use for this machine.

     A user-specified COMPILER in the [main] section of the CIME config
     takes precedence (after validation against this machine's supported
     compilers); otherwise the first entry of the machine's COMPILERS
     list is used.
     """
     cime_config = get_cime_config()
     if not cime_config.has_option('main', 'COMPILER'):
         # No user override: fall back to the machine's default compiler.
         return self.get_field_from_list("COMPILERS")
     compiler = cime_config.get('main', 'COMPILER')
     expect(self.is_valid_compiler(compiler),
            "User-selected compiler {} is not supported on machine {}".format(compiler, self.machine))
     return compiler
Exemplo n.º 2
0
 def get_default_compiler(self):
     """
     Get the compiler to use for this machine.

     Honors a COMPILER option in the [main] section of the user's CIME
     config (validated against this machine); otherwise returns the first
     entry from the machine's COMPILERS list.
     """
     cime_config = get_cime_config()
     user_override = cime_config.has_option('main', 'COMPILER')
     if user_override:
         selected = cime_config.get('main', 'COMPILER')
         expect(self.is_valid_compiler(selected),
                "User-selected compiler {} is not supported on machine {}".format(selected, self.machine))
     else:
         selected = self.get_field_from_list("COMPILERS")
     return selected
Exemplo n.º 3
0
    def __init__(self, infile=None, files=None, machine=None):
        """
        initialize an object
        if a filename is provided it will be used,
        otherwise if a files object is provided it will be used
        otherwise create a files object from default values

        The machine name is resolved in priority order: explicit `machine`
        argument, CIME_MACHINE environment variable, the [main] machine
        option of the CIME config, then probing the current host.
        """

        # State resolved later by set_machine().
        self.machine_node = None
        self.machine = None
        self.machines_dir = None
        self.custom_settings = {}
        schema = None
        if files is None:
            files = Files()
        if infile is None:
            infile = files.get_value("MACHINES_SPEC_FILE")
        schema = files.get_schema("MACHINES_SPEC_FILE")
        logger.debug("Verifying using schema {}".format(schema))

        self.machines_dir = os.path.dirname(infile)

        GenericXML.__init__(self, infile, schema)

        # Append the contents of $HOME/.cime/config_machines.xml if it exists
        # This could cause problems if node matches are repeated when only one is expected
        # NOTE(review): os.environ.get("HOME") may be None (e.g. HOME unset),
        # which would make os.path.join raise TypeError -- confirm HOME is
        # guaranteed in supported environments.
        local_infile = os.path.join(os.environ.get("HOME"), ".cime",
                                    "config_machines.xml")
        logger.debug("Infile: {}".format(local_infile))
        if os.path.exists(local_infile):
            GenericXML.read(self, local_infile, schema)

        if machine is None:
            if "CIME_MACHINE" in os.environ:
                machine = os.environ["CIME_MACHINE"]
            else:
                cime_config = get_cime_config()
                if cime_config.has_option("main", "machine"):
                    machine = cime_config.get("main", "machine")
                if machine is None:
                    # Last resort: match this host against NODENAME_REGEX entries.
                    machine = self.probe_machine_name()

        expect(
            machine is not None,
            "Could not initialize machine object from {} or {}".format(
                infile, local_infile))
        self.set_machine(machine)
Exemplo n.º 4
0
    def __init__(self, infile=None, files=None, machine=None):
        """
        initialize an object
        if a filename is provided it will be used,
        otherwise if a files object is provided it will be used
        otherwise create a files object from default values

        The machine name is resolved in priority order: explicit `machine`
        argument, CIME_MACHINE environment variable, the [main] machine
        option of the CIME config, then probing the current host.
        """

        # State resolved later by set_machine().
        self.machine_node = None
        self.machine = None
        self.machines_dir = None
        schema = None
        if files is None:
            files = Files()
        if infile is None:
            infile = files.get_value("MACHINES_SPEC_FILE")
        schema = files.get_schema("MACHINES_SPEC_FILE")
        logger.debug("Verifying using schema {}".format(schema))

        self.machines_dir = os.path.dirname(infile)

        GenericXML.__init__(self, infile, schema)

        # Append the contents of $HOME/.cime/config_machines.xml if it exists
        # This could cause problems if node matches are repeated when only one is expected
        # NOTE(review): os.environ.get("HOME") may be None (HOME unset), which
        # would make os.path.join raise TypeError -- confirm HOME is guaranteed.
        local_infile = os.path.join(os.environ.get("HOME"),".cime","config_machines.xml")
        logger.debug("Infile: {}".format(local_infile))
        if os.path.exists(local_infile):
            GenericXML.read(self, local_infile, schema)

        if machine is None:
            if "CIME_MACHINE" in os.environ:
                machine = os.environ["CIME_MACHINE"]
            else:
                cime_config = get_cime_config()
                if cime_config.has_option("main", "machine"):
                    machine = cime_config.get("main", "machine")
                if machine is None:
                    # Last resort: match this host against NODENAME_REGEX entries.
                    machine = self.probe_machine_name()

        expect(machine is not None, "Could not initialize machine object from {} or {}".format(infile, local_infile))
        self.set_machine(machine)
Exemplo n.º 5
0
            expect(dep_string is not None, "'depend_string' is not defined for this batch system")

            separator_string = self.get_value("depend_separator", subgroup=None)
            expect(separator_string is not None,"depend_separator string not defined")

            expect("jobid" in dep_string, "depend_string is missing jobid for prerequisite jobs")
            dep_ids_str = str(dep_jobs[0])
            for dep_id in dep_jobs[1:]:
                dep_ids_str += separator_string + str(dep_id)
            dep_string = dep_string.replace("jobid",dep_ids_str.strip()) # pylint: disable=maybe-no-member
            submitargs += " " + dep_string

        if batch_args is not None:
            submitargs += " " + batch_args

        cime_config = get_cime_config()

        if mail_user is None and cime_config.has_option("main", "MAIL_USER"):
            mail_user = cime_config.get("main", "MAIL_USER")

        if mail_user is not None:
            mail_user_flag = self.get_value('batch_mail_flag', subgroup=None)
            if mail_user_flag is not None:
                submitargs += " " + mail_user_flag + " " + mail_user

        if mail_type is None:
            if job == "case.test" and cime_config.has_option("create_test", "MAIL_TYPE"):
                mail_type = cime_config.get("create_test", "MAIL_TYPE")
            elif cime_config.has_option("main", "MAIL_TYPE"):
                mail_type = cime_config.get("main", "MAIL_TYPE")
            else:
Exemplo n.º 6
0
    def _submit_single_job(self, case, job, dep_jobs=None, allow_fail=False,
                           no_batch=False, skip_pnl=False, mail_user=None, mail_type=None,
                           batch_args=None, dry_run=False, resubmit_immediate=False):
        """
        Build and (unless dry_run) execute the batch-submission command for one job.

        case: case object whose job is being submitted
        job: job name, e.g. "case.run" or "case.test"
        dep_jobs: optional list of job ids this job must wait on
        allow_fail: prefer the batch system's "allow failed dependency" string if defined
        no_batch: run the job function directly instead of submitting to a batch system
        skip_pnl: skip the preview-namelist step when building run args
        mail_user/mail_type: batch notification settings; fall back to the CIME config
        batch_args: extra argument string appended to the submit command
        dry_run: return the assembled submit command instead of running it
        resubmit_immediate: toggles continue_run/submit_resubmits in the run args

        Returns None when run locally, the submit command string when dry_run,
        otherwise the batch job id parsed from the submit command's output.
        """
        if not dry_run:
            logger.warning("Submit job {}".format(job))
        batch_system = self.get_value("BATCH_SYSTEM", subgroup=None)
        if batch_system is None or batch_system == "none" or no_batch:
            # No batch system: invoke the corresponding case method directly
            # (job name "case.run" maps to method "case_run").
            logger.info("Starting job script {}".format(job))
            function_name = job.replace(".", "_")
            if not dry_run:
                args = self._build_run_args(job, True, skip_pnl=skip_pnl, set_continue_run=resubmit_immediate,
                                            submit_resubmits=not resubmit_immediate)
                try:
                    getattr(case, function_name)(**{k: v for k, (v, _) in args.items()})
                except Exception as e:
                    # We don't want exception from the run phases getting into submit phase
                    logger.warning("Exception from {}: {}".format(function_name, str(e)))

            return

        submitargs = self.get_submit_args(case, job)
        # A per-job BATCH_COMMAND_FLAGS setting replaces (not appends to) the
        # computed submit args.
        args_override = self.get_value("BATCH_COMMAND_FLAGS", subgroup=job)
        if args_override:
            submitargs = args_override

        # Translate prerequisite job ids into the batch system's dependency flag.
        if dep_jobs is not None and len(dep_jobs) > 0:
            logger.debug("dependencies: {}".format(dep_jobs))
            if allow_fail:
                dep_string = self.get_value("depend_allow_string", subgroup=None)
                if dep_string is None:
                    logger.warning("'depend_allow_string' is not defined for this batch system, " +
                                   "falling back to the 'depend_string'")
                    dep_string = self.get_value("depend_string", subgroup=None)
            else:
                dep_string = self.get_value("depend_string", subgroup=None)
            expect(dep_string is not None, "'depend_string' is not defined for this batch system")

            separator_string = self.get_value("depend_separator", subgroup=None)
            expect(separator_string is not None,"depend_separator string not defined")

            # The template's literal "jobid" token is replaced with the
            # separator-joined list of prerequisite job ids.
            expect("jobid" in dep_string, "depend_string is missing jobid for prerequisite jobs")
            dep_ids_str = str(dep_jobs[0])
            for dep_id in dep_jobs[1:]:
                dep_ids_str += separator_string + str(dep_id)
            dep_string = dep_string.replace("jobid",dep_ids_str.strip()) # pylint: disable=maybe-no-member
            submitargs += " " + dep_string

        if batch_args is not None:
            submitargs += " " + batch_args

        cime_config = get_cime_config()

        # Mail settings: explicit arguments win, then the CIME config,
        # then (for mail_type) the batch system's default.
        if mail_user is None and cime_config.has_option("main", "MAIL_USER"):
            mail_user = cime_config.get("main", "MAIL_USER")

        if mail_user is not None:
            mail_user_flag = self.get_value('batch_mail_flag', subgroup=None)
            if mail_user_flag is not None:
                submitargs += " " + mail_user_flag + " " + mail_user

        if mail_type is None:
            if job == "case.test" and cime_config.has_option("create_test", "MAIL_TYPE"):
                mail_type = cime_config.get("create_test", "MAIL_TYPE")
            elif cime_config.has_option("main", "MAIL_TYPE"):
                mail_type = cime_config.get("main", "MAIL_TYPE")
            else:
                mail_type = self.get_value("batch_mail_default")

            if mail_type:
                mail_type = mail_type.split(",") # pylint: disable=no-member

        if mail_type:
            mail_type_flag = self.get_value("batch_mail_type_flag", subgroup=None)
            if mail_type_flag is not None:
                mail_type_args = []
                for indv_type in mail_type:
                    mail_type_arg = self.get_batch_mail_type(indv_type)
                    mail_type_args.append(mail_type_arg)

                if mail_type_flag == "-m":
                    # hacky, PBS-type systems pass multiple mail-types differently
                    submitargs += " {} {}".format(mail_type_flag, "".join(mail_type_args))
                else:
                    submitargs += " {} {}".format(mail_type_flag, " {} ".format(mail_type_flag).join(mail_type_args))
        batchsubmit = self.get_value("batch_submit", subgroup=None)
        expect(batchsubmit is not None,
               "Unable to determine the correct command for batch submission.")
        batchredirect = self.get_value("batch_redirect", subgroup=None)
        batch_env_flag = self.get_value("batch_env", subgroup=None)
        run_args = self._build_run_args_str(job, False, skip_pnl=skip_pnl, set_continue_run=resubmit_immediate,
                                            submit_resubmits=not resubmit_immediate)
        # Run args go before the script for env-style batch systems and
        # after it otherwise.
        if batch_env_flag:
            sequence = (batchsubmit, submitargs, run_args, batchredirect, get_batch_script_for_job(job))
        else:
            sequence = (batchsubmit, submitargs, batchredirect, get_batch_script_for_job(job), run_args)

        submitcmd = " ".join(s.strip() for s in sequence if s is not None)

        if dry_run:
            return submitcmd
        else:
            logger.info("Submitting job script {}".format(submitcmd))
            output = run_cmd_no_fail(submitcmd, combine_output=True)
            jobid = self.get_job_id(output)
            logger.info("Submitted job id is {}".format(jobid))
            return jobid
Exemplo n.º 7
0
    def __init__(self,
                 infile=None,
                 files=None,
                 machine=None,
                 extra_machines_dir=None):
        """
        initialize an object
        if a filename is provided it will be used,
        otherwise if a files object is provided it will be used
        otherwise create a files object from default values

        If extra_machines_dir is provided, it should be a string giving a path to an
        additional directory that will be searched for a config_machines.xml file; if
        found, the contents of this file will be appended to the standard
        config_machines.xml. An empty string is treated the same as None.

        The machine name is resolved in priority order: explicit `machine`
        argument, CIME_MACHINE environment variable, the [main] machine
        option of the CIME config, then probing the current host.
        """

        # State resolved later by set_machine().
        self.machine_node = None
        self.machine = None
        self.machines_dir = None
        self.custom_settings = {}
        schema = None
        if files is None:
            files = Files()
        if infile is None:
            infile = files.get_value("MACHINES_SPEC_FILE")
        schema = files.get_schema("MACHINES_SPEC_FILE")
        logger.debug("Verifying using schema {}".format(schema))

        self.machines_dir = os.path.dirname(infile)

        GenericXML.__init__(self, infile, schema)

        # Append the contents of $HOME/.cime/config_machines.xml if it exists.
        #
        # Also append the contents of a config_machines.xml file in the directory given by
        # extra_machines_dir, if present.
        #
        # This could cause problems if node matches are repeated when only one is expected.
        # NOTE(review): os.environ.get("HOME") may be None (HOME unset), which
        # would make os.path.join raise TypeError -- confirm HOME is guaranteed.
        local_infile = os.path.join(os.environ.get("HOME"), ".cime",
                                    "config_machines.xml")
        logger.debug("Infile: {}".format(local_infile))
        if os.path.exists(local_infile):
            GenericXML.read(self, local_infile, schema)
        if extra_machines_dir:
            local_infile = os.path.join(extra_machines_dir,
                                        "config_machines.xml")
            logger.debug("Infile: {}".format(local_infile))
            if os.path.exists(local_infile):
                GenericXML.read(self, local_infile, schema)

        if machine is None:
            if "CIME_MACHINE" in os.environ:
                machine = os.environ["CIME_MACHINE"]
            else:
                cime_config = get_cime_config()
                if cime_config.has_option("main", "machine"):
                    machine = cime_config.get("main", "machine")
                if machine is None:
                    # Last resort: match this host against NODENAME_REGEX entries.
                    machine = self.probe_machine_name()

        expect(
            machine is not None,
            "Could not initialize machine object from {} or {}".format(
                infile, local_infile))
        self.set_machine(machine)
Exemplo n.º 8
0
    def _submit_single_job(self, case, job, dep_jobs=None, no_batch=False,
                           skip_pnl=False, mail_user=None, mail_type=None,
                           batch_args=None, dry_run=False):
        """
        Build and (unless dry_run) execute the batch-submission command for one job.

        case: case object whose job is being submitted
        job: job name, e.g. "case.run" or "case.test"
        dep_jobs: optional list of job ids this job must wait on
        no_batch: run the job function directly instead of submitting to a batch system
        skip_pnl: pass --skip-preview-namelist to case.run/case.test
        mail_user/mail_type: batch notification settings; fall back to the CIME config
        batch_args: extra argument string appended to the submit command
        dry_run: return the assembled submit command instead of running it

        Returns None when run locally, the submit command string when dry_run,
        otherwise the batch job id parsed from the submit command's output.
        """
        if not dry_run:
            logger.warning("Submit job {}".format(job))
        batch_system = self.get_value("BATCH_SYSTEM", subgroup=None)
        if batch_system is None or batch_system == "none" or no_batch:
            # No batch system: invoke the corresponding case method directly
            # (job name "case.run" maps to method "case_run").
            logger.info("Starting job script {}".format(job))
            function_name = job.replace(".", "_")
            if not dry_run:
                if "archive" not in function_name:
                    getattr(case,function_name)(skip_pnl=skip_pnl)
                else:
                    getattr(case,function_name)()

            return

        submitargs = self.get_submit_args(case, job)
        # A per-job BATCH_COMMAND_FLAGS setting replaces (not appends to) the
        # computed submit args.
        args_override = self.get_value("BATCH_COMMAND_FLAGS", subgroup=job)
        if args_override:
            submitargs = args_override

        # Translate prerequisite job ids into the batch system's dependency flag.
        if dep_jobs is not None and len(dep_jobs) > 0:
            logger.info("dependencies: {}".format(dep_jobs))
            dep_string = self.get_value("depend_string", subgroup=None)
            # Fail with a clear message instead of a TypeError on the
            # "jobid" containment test below when the batch system does not
            # define a dependency string.
            expect(dep_string is not None, "'depend_string' is not defined for this batch system")
            separator_string = self.get_value("depend_separator", subgroup=None)
            expect(separator_string is not None,"depend_separator string not defined")
            # The template's literal "jobid" token is replaced with the
            # separator-joined list of prerequisite job ids.
            expect("jobid" in dep_string, "depend_string is missing jobid for prerequisite jobs")
            dep_ids_str = str(dep_jobs[0])
            for dep_id in dep_jobs[1:]:
                dep_ids_str += separator_string + str(dep_id)
            dep_string = dep_string.replace("jobid",dep_ids_str.strip()) # pylint: disable=maybe-no-member
            submitargs += " " + dep_string

        if batch_args is not None:
            submitargs += " " + batch_args

        cime_config = get_cime_config()

        # Mail settings: explicit arguments win, then the CIME config,
        # then (for mail_type) the batch system's default.
        if mail_user is None and cime_config.has_option("main", "MAIL_USER"):
            mail_user = cime_config.get("main", "MAIL_USER")

        if mail_user is not None:
            mail_user_flag = self.get_value('batch_mail_flag', subgroup=None)
            if mail_user_flag is not None:
                submitargs += " " + mail_user_flag + " " + mail_user

        if mail_type is None:
            if job == "case.test" and cime_config.has_option("create_test", "MAIL_TYPE"):
                mail_type = cime_config.get("create_test", "MAIL_TYPE")
            elif cime_config.has_option("main", "MAIL_TYPE"):
                mail_type = cime_config.get("main", "MAIL_TYPE")
            else:
                mail_type = self.get_value("batch_mail_default")

            if mail_type:
                mail_type = mail_type.split(",") # pylint: disable=no-member

        if mail_type:
            mail_type_flag = self.get_value("batch_mail_type_flag", subgroup=None)
            if mail_type_flag is not None:
                mail_type_args = []
                for indv_type in mail_type:
                    mail_type_arg = self.get_batch_mail_type(indv_type)
                    mail_type_args.append(mail_type_arg)

                if mail_type_flag == "-m":
                    # hacky, PBS-type systems pass multiple mail-types differently
                    submitargs += " {} {}".format(mail_type_flag, "".join(mail_type_args))
                else:
                    submitargs += " {} {}".format(mail_type_flag, " {} ".format(mail_type_flag).join(mail_type_args))
        batchsubmit = self.get_value("batch_submit", subgroup=None)
        expect(batchsubmit is not None,
               "Unable to determine the correct command for batch submission.")
        batchredirect = self.get_value("batch_redirect", subgroup=None)
        submitcmd = ''
        batch_env_flag = self.get_value("batch_env", subgroup=None)
        # "skip_pnl" is a placeholder token marking where the
        # --skip-preview-namelist argument belongs in the command sequence:
        # before the redirect for env-style systems, after the script otherwise.
        if batch_env_flag:
            sequence = (batchsubmit, submitargs, "skip_pnl", batchredirect, get_batch_script_for_job(job))
        else:
            sequence = (batchsubmit, submitargs, batchredirect, get_batch_script_for_job(job), "skip_pnl")

        for string in sequence:
            if string == "skip_pnl":
                # Only case.run/case.test understand --skip-preview-namelist.
                if job in ['case.run', 'case.test'] and skip_pnl:
                    batch_env_flag = self.get_value("batch_env", subgroup=None)
                    if not batch_env_flag:
                        submitcmd += " --skip-preview-namelist "
                    else:
                        submitcmd += " {} ARGS_FOR_SCRIPT='--skip-preview-namelist' ".format(batch_env_flag)

            elif string is not None:
                submitcmd += string + " "

        if dry_run:
            return submitcmd
        else:
            logger.info("Submitting job script {}".format(submitcmd))
            output = run_cmd_no_fail(submitcmd, combine_output=True)
            jobid = self.get_job_id(output)
            logger.info("Submitted job id is {}".format(jobid))
            return jobid
Exemplo n.º 9
0
def parse_command_line(args, description):
    ###############################################################################

    parser = argparse.ArgumentParser(
        description=description, formatter_class=RawTextHelpFormatter
    )

    model = CIME.utils.get_model()

    CIME.utils.setup_standard_logging_options(parser)

    config = get_cime_config()

    parser.add_argument(
        "--no-run", action="store_true", help="Do not run generated tests"
    )

    parser.add_argument(
        "--no-build",
        action="store_true",
        help="Do not build generated tests, implies --no-run",
    )

    parser.add_argument(
        "--no-setup",
        action="store_true",
        help="Do not setup generated tests, implies --no-build and --no-run",
    )

    parser.add_argument(
        "-u",
        "--use-existing",
        action="store_true",
        help="Use pre-existing case directories they will pick up at the "
        "\nlatest PEND state or re-run the first failed state. Requires test-id",
    )

    default = get_default_setting(config, "SAVE_TIMING", False, check_main=False)

    parser.add_argument(
        "--save-timing",
        action="store_true",
        default=default,
        help="Enable archiving of performance data.",
    )

    parser.add_argument(
        "--no-batch",
        action="store_true",
        help="Do not submit jobs to batch system, run locally."
        "\nIf false, this will default to machine setting.",
    )

    parser.add_argument(
        "--single-exe",
        action="store_true",
        default=False,
        help="Use a single build for all cases. This can "
        "\ndrastically improve test throughput but is currently use-at-your-own risk."
        "\nIt's up to the user to ensure that all cases are build-compatible."
        "\nE3SM tests belonging to a suite with share enabled will always share exes.",
    )

    default = get_default_setting(config, "SINGLE_SUBMIT", False, check_main=False)

    parser.add_argument(
        "--single-submit",
        action="store_true",
        default=default,
        help="Use a single interactive allocation to run all the tests. This can "
        "\ndrastically reduce queue waiting but only makes sense on batch machines.",
    )

    default = get_default_setting(config, "TEST_ROOT", None, check_main=False)

    parser.add_argument(
        "-r",
        "--test-root",
        default=default,
        help="Where test cases will be created. The default is output root"
        "\nas defined in the config_machines file",
    )

    default = get_default_setting(config, "OUTPUT_ROOT", None, check_main=False)

    parser.add_argument(
        "--output-root", default=default, help="Where the case output is written."
    )

    default = get_default_setting(config, "BASELINE_ROOT", None, check_main=False)

    parser.add_argument(
        "--baseline-root",
        default=default,
        help="Specifies a root directory for baseline datasets that will "
        "\nbe used for Bit-for-bit generate and/or compare testing.",
    )

    default = get_default_setting(config, "CLEAN", False, check_main=False)

    parser.add_argument(
        "--clean",
        action="store_true",
        default=default,
        help="Specifies if tests should be cleaned after run. If set, all object"
        "\nexecutables and data files will be removed after the tests are run.",
    )

    default = get_default_setting(config, "MACHINE", None, check_main=True)

    parser.add_argument(
        "-m",
        "--machine",
        default=default,
        help="The machine for creating and building tests. This machine must be defined"
        "\nin the config_machines.xml file for the given model. The default is to "
        "\nto match the name of the machine in the test name or the name of the "
        "\nmachine this script is run on to the NODENAME_REGEX field in "
        "\nconfig_machines.xml. WARNING: This option is highly unsafe and should "
        "\nonly be used if you are an expert.",
    )

    default = get_default_setting(config, "MPILIB", None, check_main=True)

    parser.add_argument(
        "--mpilib",
        default=default,
        help="Specify the mpilib. To see list of supported MPI libraries for each machine, "
        "\ninvoke ./query_config. The default is the first listing .",
    )

    if model in ["cesm", "ufs"]:
        parser.add_argument(
            "-c",
            "--compare",
            help="While testing, compare baselines against the given compare directory. ",
        )

        parser.add_argument(
            "-g",
            "--generate",
            help="While testing, generate baselines in the given generate directory. "
            "\nNOTE: this can also be done after the fact with bless_test_results",
        )

        parser.add_argument(
            "--xml-machine",
            help="Use this machine key in the lookup in testlist.xml. "
            "\nThe default is all if any --xml- argument is used.",
        )

        parser.add_argument(
            "--xml-compiler",
            help="Use this compiler key in the lookup in testlist.xml. "
            "\nThe default is all if any --xml- argument is used.",
        )

        parser.add_argument(
            "--xml-category",
            help="Use this category key in the lookup in testlist.xml. "
            "\nThe default is all if any --xml- argument is used.",
        )

        parser.add_argument(
            "--xml-testlist",
            help="Use this testlist to lookup tests.The default is specified in config_files.xml",
        )

        parser.add_argument(
            "--xml-driver",
            choices=("mct", "nuopc", "moab"),
            help="Override driver specified in tests and use this one.",
        )

        parser.add_argument(
            "testargs",
            nargs="*",
            help="Tests to run. Testname form is TEST.GRID.COMPSET[.MACHINE_COMPILER]",
        )

    else:

        parser.add_argument(
            "testargs",
            nargs="+",
            help="Tests or test suites to run."
            " Testname form is TEST.GRID.COMPSET[.MACHINE_COMPILER]",
        )

        parser.add_argument(
            "-b",
            "--baseline-name",
            help="If comparing or generating baselines, use this directory under baseline root. "
            "\nDefault will be current branch name.",
        )

        parser.add_argument(
            "-c",
            "--compare",
            action="store_true",
            help="While testing, compare baselines",
        )

        parser.add_argument(
            "-g",
            "--generate",
            action="store_true",
            help="While testing, generate baselines. "
            "\nNOTE: this can also be done after the fact with bless_test_results",
        )

    default = get_default_setting(config, "COMPILER", None, check_main=True)

    parser.add_argument(
        "--compiler",
        default=default,
        help="Compiler for building cime. Default will be the name in the "
        "\nTestname or the default defined for the machine.",
    )

    parser.add_argument(
        "-n",
        "--namelists-only",
        action="store_true",
        help="Only perform namelist actions for tests",
    )

    parser.add_argument(
        "-p",
        "--project",
        help="Specify a project id for the case (optional)."
        "\nUsed for accounting and directory permissions when on a batch system."
        "\nThe default is user or machine specified by PROJECT."
        "\nAccounting (only) may be overridden by user or machine specified CHARGE_ACCOUNT.",
    )

    parser.add_argument(
        "-t",
        "--test-id",
        help="Specify an 'id' for the test. This is simply a string that is appended "
        "\nto the end of a test name. If no test-id is specified, a time stamp plus a "
        "\nrandom string will be used (ensuring a high probability of uniqueness). "
        "\nIf a test-id is specified, it is the user's responsibility to ensure that "
        "\neach run of create_test uses a unique test-id. WARNING: problems will occur "
        "\nif you use the same test-id twice on the same file system, even if the test "
        "\nlists are completely different.",
    )

    default = get_default_setting(config, "PARALLEL_JOBS", None, check_main=False)

    parser.add_argument(
        "-j",
        "--parallel-jobs",
        type=int,
        default=default,
        help="Number of tasks create_test should perform simultaneously. The default "
        "\n is min(num_cores, num_tests).",
    )

    default = get_default_setting(config, "PROC_POOL", None, check_main=False)

    parser.add_argument(
        "--proc-pool",
        type=int,
        default=default,
        help="The size of the processor pool that create_test can use. The default is "
        "\nMAX_MPITASKS_PER_NODE + 25 percent.",
    )

    default = os.getenv("CIME_GLOBAL_WALLTIME")
    if default is None:
        default = get_default_setting(config, "WALLTIME", None, check_main=True)

    parser.add_argument(
        "--walltime",
        default=default,
        help="Set the wallclock limit for all tests in the suite. "
        "\nUse the variable CIME_GLOBAL_WALLTIME to set this for all tests.",
    )

    default = get_default_setting(config, "JOB_QUEUE", None, check_main=True)

    parser.add_argument(
        "-q",
        "--queue",
        default=default,
        help="Force batch system to use a certain queue",
    )

    parser.add_argument(
        "-f", "--testfile", help="A file containing an ascii list of tests to run"
    )

    default = get_default_setting(
        config, "ALLOW_BASELINE_OVERWRITE", False, check_main=False
    )

    parser.add_argument(
        "-o",
        "--allow-baseline-overwrite",
        action="store_true",
        default=default,
        help="If the --generate option is given, then an attempt to overwrite "
        "\nan existing baseline directory will raise an error. WARNING: Specifying this "
        "\noption will allow existing baseline directories to be silently overwritten.",
    )

    default = get_default_setting(config, "WAIT", False, check_main=False)

    parser.add_argument(
        "--wait",
        action="store_true",
        default=default,
        help="On batch systems, wait for submitted jobs to complete",
    )

    default = get_default_setting(config, "ALLOW_PNL", False, check_main=False)

    parser.add_argument(
        "--allow-pnl",
        action="store_true",
        default=default,
        help="Do not pass skip-pnl to case.submit",
    )

    parser.add_argument(
        "--check-throughput",
        action="store_true",
        help="Fail if throughput check fails. Requires --wait on batch systems",
    )

    parser.add_argument(
        "--check-memory",
        action="store_true",
        help="Fail if memory check fails. Requires --wait on batch systems",
    )

    parser.add_argument(
        "--ignore-namelists",
        action="store_true",
        help="Do not fail if there namelist diffs",
    )

    parser.add_argument(
        "--ignore-memleak", action="store_true", help="Do not fail if there's a memleak"
    )

    default = get_default_setting(config, "FORCE_PROCS", None, check_main=False)

    parser.add_argument(
        "--force-procs",
        type=int,
        default=default,
        help="For all tests to run with this number of processors",
    )

    default = get_default_setting(config, "FORCE_THREADS", None, check_main=False)

    parser.add_argument(
        "--force-threads",
        type=int,
        default=default,
        help="For all tests to run with this number of threads",
    )

    default = get_default_setting(config, "INPUT_DIR", None, check_main=True)

    parser.add_argument(
        "-i",
        "--input-dir",
        default=default,
        help="Use a non-default location for input files",
    )

    default = get_default_setting(config, "PESFILE", None, check_main=True)

    parser.add_argument(
        "--pesfile",
        default=default,
        help="Full pathname of an optional pes specification file. The file"
        "\ncan follow either the config_pes.xml or the env_mach_pes.xml format.",
    )

    default = get_default_setting(config, "RETRY", 0, check_main=False)

    parser.add_argument(
        "--retry",
        type=int,
        default=default,
        help="Automatically retry failed tests. >0 implies --wait",
    )

    parser.add_argument(
        "-N",
        "--non-local",
        action="store_true",
        help="Use when you've requested a machine that you aren't on. "
        "Will reduce errors for missing directories etc.",
    )

    if config and config.has_option("main", "workflow"):
        workflow_default = config.get("main", "workflow")
    else:
        workflow_default = "default"

    parser.add_argument(
        "--workflow",
        default=workflow_default,
        help="A workflow from config_workflow.xml to apply to this case. ",
    )

    parser.add_argument(
        "--chksum", action="store_true", help="Verifies input data checksums."
    )

    srcroot_default = utils.get_src_root()

    parser.add_argument(
        "--srcroot",
        default=srcroot_default,
        help="Alternative pathname for source root directory. "
        f"The default is {srcroot_default}",
    )

    CIME.utils.add_mail_type_args(parser)

    args = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser)

    CIME.utils.resolve_mail_type_args(args)

    # generate and compare flags may not point to the same directory
    if model in ["cesm", "ufs"]:
        if args.generate is not None:
            expect(
                not (args.generate == args.compare),
                "Cannot generate and compare baselines at the same time",
            )

        if args.xml_testlist is not None:
            expect(
                not (
                    args.xml_machine is None
                    and args.xml_compiler is None
                    and args.xml_category is None
                ),
                "If an xml-testlist is present at least one of --xml-machine, "
                "--xml-compiler, --xml-category must also be present",
            )

    else:
        expect(
            not (
                args.baseline_name is not None
                and (not args.compare and not args.generate)
            ),
            "Provided baseline name but did not specify compare or generate",
        )
        expect(
            not (args.compare and args.generate),
            "Tried to compare and generate at same time",
        )

    expect(
        not (args.namelists_only and not (args.generate or args.compare)),
        "Must provide either --compare or --generate with --namelists-only",
    )

    if args.retry > 0:
        args.wait = True

    if args.parallel_jobs is not None:
        expect(
            args.parallel_jobs > 0,
            "Invalid value for parallel_jobs: %d" % args.parallel_jobs,
        )

    if args.use_existing:
        expect(args.test_id is not None, "Must provide test-id of pre-existing cases")

    if args.no_setup:
        args.no_build = True

    if args.no_build:
        args.no_run = True

    # Namelist-only forces some other options:
    if args.namelists_only:
        expect(not args.no_setup, "Cannot compare namelists without setup")
        args.no_build = True
        args.no_run = True
        args.no_batch = True

    expect(
        not (args.non_local and not args.no_build), "Cannot build on non-local machine"
    )

    if args.single_submit:
        expect(
            not args.no_run,
            "Doesn't make sense to request single-submit if no-run is on",
        )
        args.no_build = True
        args.no_run = True
        args.no_batch = True

    if args.test_id is None:
        args.test_id = "%s_%s" % (CIME.utils.get_timestamp(), CIME.utils.id_generator())
    else:
        expect(
            CIME.utils.check_name(args.test_id, additional_chars="."),
            "invalid test-id argument provided",
        )

    if args.testfile is not None:
        with open(args.testfile, "r") as fd:
            args.testargs.extend(
                [
                    line.strip()
                    for line in fd.read().splitlines()
                    if line.strip() and not line.startswith("#")
                ]
            )

    # Propagate `srcroot` to `GenericXML` to resolve $SRCROOT
    # See call to `Machines` below
    utils.GLOBAL["SRCROOT"] = args.srcroot

    # Compute list of fully-resolved test_names
    test_extra_data = {}
    if model in ["cesm", "ufs"]:
        machine_name = args.xml_machine if args.machine is None else args.machine

        # If it's still unclear what machine to use, look at test names
        if machine_name is None:
            for test in args.testargs:
                testsplit = CIME.utils.parse_test_name(test)
                if testsplit[4] is not None:
                    if machine_name is None:
                        machine_name = testsplit[4]
                    else:
                        expect(
                            machine_name == testsplit[4],
                            "ambiguity in machine, please use the --machine option",
                        )

        mach_obj = Machines(machine=machine_name)
        if args.testargs:
            args.compiler = (
                mach_obj.get_default_compiler()
                if args.compiler is None
                else args.compiler
            )
            test_names = get_tests.get_full_test_names(
                args.testargs, mach_obj.get_machine_name(), args.compiler
            )
        else:
            expect(
                not (
                    args.xml_machine is None
                    and args.xml_compiler is None
                    and args.xml_category is None
                    and args.xml_testlist is None
                ),
                "At least one of --xml-machine, --xml-testlist, "
                "--xml-compiler, --xml-category or a valid test name must be provided.",
            )

            test_data = get_tests_from_xml(
                xml_machine=args.xml_machine,
                xml_category=args.xml_category,
                xml_compiler=args.xml_compiler,
                xml_testlist=args.xml_testlist,
                machine=machine_name,
                compiler=args.compiler,
                driver=args.xml_driver,
            )
            test_names = [item["name"] for item in test_data]
            for test_datum in test_data:
                test_extra_data[test_datum["name"]] = test_datum

        logger.info("Testnames: %s" % test_names)
    else:
        if args.machine is None:
            args.machine = get_tests.infer_machine_name_from_tests(args.testargs)

        mach_obj = Machines(machine=args.machine)
        args.compiler = (
            mach_obj.get_default_compiler() if args.compiler is None else args.compiler
        )

        test_names = get_tests.get_full_test_names(
            args.testargs, mach_obj.get_machine_name(), args.compiler
        )

    expect(
        mach_obj.is_valid_compiler(args.compiler),
        "Compiler %s not valid for machine %s"
        % (args.compiler, mach_obj.get_machine_name()),
    )

    if not args.wait and mach_obj.has_batch_system() and not args.no_batch:
        expect(
            not args.check_throughput,
            "Makes no sense to use --check-throughput without --wait",
        )
        expect(
            not args.check_memory, "Makes no sense to use --check-memory without --wait"
        )

    # Normalize compare/generate between the models
    baseline_cmp_name = None
    baseline_gen_name = None
    if args.compare or args.generate:
        if model in ["cesm", "ufs"]:
            if args.compare is not None:
                baseline_cmp_name = args.compare
            if args.generate is not None:
                baseline_gen_name = args.generate
        else:
            baseline_name = (
                args.baseline_name
                if args.baseline_name
                else CIME.utils.get_current_branch(repo=CIME.utils.get_cime_root())
            )
            expect(
                baseline_name is not None,
                "Could not determine baseline name from branch, please use -b option",
            )
            if args.compare:
                baseline_cmp_name = baseline_name
            elif args.generate:
                baseline_gen_name = baseline_name

    if args.input_dir is not None:
        args.input_dir = os.path.abspath(args.input_dir)

    # sanity check
    for name in test_names:
        dot_count = name.count(".")
        expect(dot_count > 1 and dot_count <= 4, "Invalid test Name, '{}'".format(name))

    # for e3sm, sort by walltime
    if model == "e3sm":
        if args.walltime is None:
            # Longest tests should run first
            test_names.sort(key=get_tests.key_test_time, reverse=True)
        else:
            test_names.sort()

    return (
        test_names,
        test_extra_data,
        args.compiler,
        mach_obj.get_machine_name(),
        args.no_run,
        args.no_build,
        args.no_setup,
        args.no_batch,
        args.test_root,
        args.baseline_root,
        args.clean,
        baseline_cmp_name,
        baseline_gen_name,
        args.namelists_only,
        args.project,
        args.test_id,
        args.parallel_jobs,
        args.walltime,
        args.single_submit,
        args.proc_pool,
        args.use_existing,
        args.save_timing,
        args.queue,
        args.allow_baseline_overwrite,
        args.output_root,
        args.wait,
        args.force_procs,
        args.force_threads,
        args.mpilib,
        args.input_dir,
        args.pesfile,
        args.retry,
        args.mail_user,
        args.mail_type,
        args.check_throughput,
        args.check_memory,
        args.ignore_namelists,
        args.ignore_memleak,
        args.allow_pnl,
        args.non_local,
        args.single_exe,
        args.workflow,
        args.chksum,
    )
Exemplo n.º 10
0
def parse_command_line(args, cimeroot, description):
    ###############################################################################
    """
    Parse the create_newcase command line.

    args        -- raw argument list (typically sys.argv[1:])
    cimeroot    -- CIME root path (unused in this function; kept for
                   interface compatibility with callers)
    description -- text shown in the parser's --help output

    Returns a tuple of the parsed settings, in the order the caller
    unpacks them: (case, compset, res, machine, compiler, mpilib,
    project, pecount, user_mods_dirs, pesfile, gridfile, srcroot, test,
    multi_driver, ninst, walltime, queue, output_root, script_root,
    run_unsupported, answer, input_dir, driver, workflow, non_local,
    extra_machines_dir, case_group, ngpus_per_node).

    Raises (via expect) on an illegal case name, or when a user-supplied
    srcroot/gridfile/pesfile path does not exist.
    """
    parser = argparse.ArgumentParser(
        description=description, formatter_class=RawTextHelpFormatter
    )

    CIME.utils.setup_standard_logging_options(parser)

    # The user config (~/.cime) may be absent or unreadable; fall back to
    # None so the defaults below are used instead of failing outright.
    try:
        cime_config = get_cime_config()
    except Exception:
        cime_config = None

    parser.add_argument(
        "--case",
        "-case",
        required=True,
        metavar="CASENAME",
        help="(required) Specify the case name. "
        "\nIf this is simply a name (not a path), the case directory is created in the current working directory."
        "\nThis can also be a relative or absolute path specifying where the case should be created;"
        "\nwith this usage, the name of the case will be the last component of the path.",
    )

    parser.add_argument(
        "--compset",
        "-compset",
        required=True,
        help="(required) Specify a compset. "
        "\nTo see list of current compsets, use the utility ./query_config --compsets in this directory.\n",
    )

    parser.add_argument(
        "--res",
        "-res",
        required=True,
        metavar="GRID",
        help="(required) Specify a model grid resolution. "
        "\nTo see list of current model resolutions, use the utility "
        "\n./query_config --grids in this directory.",
    )

    parser.add_argument(
        "--machine",
        "-mach",
        help="Specify a machine. "
        "The default value is the match to NODENAME_REGEX in config_machines.xml. To see "
        "\nthe list of current machines, invoke ./query_config --machines.",
    )

    parser.add_argument(
        "--compiler",
        "-compiler",
        help="Specify a compiler. "
        "\nTo see list of supported compilers for each machine, use the utility "
        "\n./query_config --machines in this directory. "
        "\nThe default value will be the first one listed.",
    )

    parser.add_argument(
        "--multi-driver",
        action="store_true",
        help="Specify that --ninst should modify the number of driver/coupler instances. "
        "\nThe default is to have one driver/coupler supporting multiple component instances.",
    )

    parser.add_argument(
        "--ninst",
        default=1,
        type=int,
        help="Specify number of model ensemble instances. "
        "\nThe default is multiple components and one driver/coupler. "
        "\nUse --multi-driver to run multiple driver/couplers in the ensemble.",
    )

    parser.add_argument(
        "--mpilib",
        "-mpilib",
        help="Specify the MPI library. "
        "To see list of supported mpilibs for each machine, invoke ./query_config --machines."
        "\nThe default is the first listing in MPILIBS in config_machines.xml.\n",
    )

    parser.add_argument(
        "--project",
        "-project",
        help="Specify a project id for the case (optional)."
        "\nUsed for accounting and directory permissions when on a batch system."
        "\nThe default is user or machine specified by PROJECT."
        "\nAccounting (only) may be overridden by user or machine specified CHARGE_ACCOUNT.",
    )

    parser.add_argument(
        "--pecount",
        "-pecount",
        default="M",
        help="Specify a target size description for the number of cores. "
        "\nThis is used to query the appropriate config_pes.xml file and find the "
        "\noptimal PE-layout for your case - if it exists there. "
        "\nAllowed options are  ('S','M','L','X1','X2','[0-9]x[0-9]','[0-9]').\n",
    )

    # This option supports multiple values, hence the plural ("user-mods-dirs"). However,
    # we support the singular ("user-mods-dir") for backwards compatibility (and because
    # the singular may be more intuitive for someone who only wants to use a single
    # directory).
    parser.add_argument(
        "--user-mods-dirs",
        "--user-mods-dir",
        nargs="*",
        help="Full pathname to a directory containing any combination of user_nl_* files "
        "\nand a shell_commands script (typically containing xmlchange commands). "
        "\nThe directory can also contain a SourceMods/ directory with the same structure "
        "\nas would be found in a case directory."
        "\nIt can also contain a file named 'include_user_mods' which gives the path to"
        "\none or more other directories that should be included."
        "\nMultiple directories can be given to the --user-mods-dirs argument,"
        "\nin which case changes from all of them are applied."
        "\n(If there are conflicts, later directories take precedence.)"
        "\n(Care is needed if multiple directories include the same directory via 'include_user_mods':"
        "\nin this case, the included directory will be applied multiple times.)",
    )

    parser.add_argument(
        "--pesfile",
        help="Full pathname of an optional pes specification file. "
        "\nThe file can follow either the config_pes.xml or the env_mach_pes.xml format.",
    )

    parser.add_argument(
        "--gridfile",
        help="Full pathname of config grid file to use. "
        "\nThis should be a copy of config/config_grids.xml with the new user grid changes added to it. \n",
    )

    # The default workflow may be overridden by the user's CIME config file.
    if cime_config and cime_config.has_option("main", "workflow"):
        workflow_default = cime_config.get("main", "workflow")
    else:
        workflow_default = "default"

    parser.add_argument(
        "--workflow",
        default=workflow_default,
        help="A workflow from config_workflow.xml to apply to this case. ",
    )

    # Model-specific options below depend on knowing the model; without a
    # usable CIME config we cannot determine it.
    if cime_config:
        model = get_model()
    else:
        model = None

    srcroot_default = get_src_root()

    parser.add_argument(
        "--srcroot",
        default=srcroot_default,
        help="Alternative pathname for source root directory. "
        f"The default is {srcroot_default}",
    )

    parser.add_argument(
        "--output-root",
        help="Alternative pathname for the directory where case output is written.",
    )

    # The following is a deprecated option
    parser.add_argument(
        "--script-root", dest="script_root", default=None, help=argparse.SUPPRESS
    )

    if model == "cesm":
        parser.add_argument(
            "--run-unsupported",
            action="store_true",
            help="Force the creation of a case that is not tested or supported by CESM developers.",
        )
    # hidden argument indicating called from create_test
    # Indicates that create_newcase was called from create_test - do not use otherwise.
    parser.add_argument("--test", "-test", action="store_true", help=argparse.SUPPRESS)

    parser.add_argument(
        "--walltime",
        default=os.getenv("CIME_GLOBAL_WALLTIME"),
        help="Set the wallclock limit for this case in the format (the usual format is HH:MM:SS). "
        "\nYou may use env var CIME_GLOBAL_WALLTIME to set this. "
        "\nIf CIME_GLOBAL_WALLTIME is not defined in the environment, then the walltime"
        "\nwill be the maximum allowed time defined for the queue in config_batch.xml.",
    )

    parser.add_argument(
        "-q",
        "--queue",
        default=None,
        help="Force batch system to use the specified queue. ",
    )

    parser.add_argument(
        "--handle-preexisting-dirs",
        dest="answer",
        choices=("a", "r", "u"),
        default=None,
        help="Do not query how to handle pre-existing bld/exe dirs. "
        "\nValid options are (a)bort (r)eplace or (u)se existing. "
        "\nThis can be useful if you need to run create_newcase non-interactively.",
    )

    parser.add_argument(
        "-i",
        "--input-dir",
        help="Use a non-default location for input files. This will change the xml value of DIN_LOC_ROOT.",
    )
    parser.add_argument(
        "--driver",
        default=get_cime_default_driver(),
        choices=("mct", "nuopc", "moab"),
        help=argparse.SUPPRESS,
    )

    parser.add_argument(
        "-n",
        "--non-local",
        action="store_true",
        help="Use when you've requested a machine that you aren't on. "
        "Will reduce errors for missing directories etc.",
    )

    parser.add_argument(
        "--extra-machines-dir",
        help="Optional path to a directory containing one or more of:"
        "\nconfig_machines.xml, config_compilers.xml, config_batch.xml."
        "\nIf provided, the contents of these files will be appended to"
        "\nthe standard machine files (and any files in ~/.cime).",
    )

    parser.add_argument("--case-group", help="Add this case to a case group")

    parser.add_argument(
        "--ngpus-per-node",
        default=0,
        type=int,
        help="Specify number of GPUs used for simulation. ",
    )

    args = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser)

    # Validate user-supplied paths early so failures are clear and immediate.
    if args.srcroot is not None:
        expect(
            os.path.isdir(args.srcroot),
            "Input non-default directory srcroot {} does not exist ".format(
                args.srcroot
            ),
        )
        args.srcroot = os.path.abspath(args.srcroot)

    if args.gridfile is not None:
        expect(
            os.path.isfile(args.gridfile),
            "Grid specification file {} does not exist ".format(args.gridfile),
        )

    if args.pesfile is not None:
        expect(
            os.path.isfile(args.pesfile),
            "Pes specification file {} cannot be found ".format(args.pesfile),
        )

    # --run-unsupported only exists on the parser for cesm (added above).
    run_unsupported = False
    if model == "cesm":
        run_unsupported = args.run_unsupported

    expect(
        CIME.utils.check_name(args.case, fullpath=True),
        "Illegal case name argument provided",
    )

    # Resolve input_dir: explicit flag wins, then the user config file.
    if args.input_dir is not None:
        args.input_dir = os.path.abspath(args.input_dir)
    elif cime_config and cime_config.has_option("main", "input_dir"):
        args.input_dir = os.path.abspath(cime_config.get("main", "input_dir"))

    return (
        args.case,
        args.compset,
        args.res,
        args.machine,
        args.compiler,
        args.mpilib,
        args.project,
        args.pecount,
        args.user_mods_dirs,
        args.pesfile,
        args.gridfile,
        args.srcroot,
        args.test,
        args.multi_driver,
        args.ninst,
        args.walltime,
        args.queue,
        args.output_root,
        args.script_root,
        run_unsupported,
        args.answer,
        args.input_dir,
        args.driver,
        args.workflow,
        args.non_local,
        args.extra_machines_dir,
        args.case_group,
        args.ngpus_per_node,
    )