Example #1
def save_test_success(baseline_root, src_root, test, succeeded, force_commit_test=None):
    """
    Update the stored success data for this test based on the succeeded flag
    """
    if baseline_root is not None:
        try:
            with SharedArea():
                success_path, prev_results = _read_success_data(baseline_root, test)

                the_dir = os.path.dirname(success_path)
                if not os.path.exists(the_dir):
                    os.makedirs(the_dir)

                prev_succeeded = _is_test_working(prev_results, src_root, testing=(force_commit_test is not None))

                # record a result when the test passed, when the pass/fail state changed, or when no commit is recorded yet for this outcome
                if succeeded or succeeded != prev_succeeded or (prev_results[0] is None and succeeded) or (prev_results[1] is None and not succeeded):

                    new_results = list(prev_results)
                    my_commit = force_commit_test if force_commit_test else get_current_commit(repo=src_root)
                    if succeeded:
                        new_results[0] = my_commit # we passed
                    else:
                        new_results[1] = my_commit # we transitioned to a failing state

                    str_results = ["None" if item is None else item for item in new_results]
                    with open(success_path, "w") as fd:
                        fd.write("{}\n".format(" ".join(str_results)))

        except Exception:
            # We NEVER want a failure here to kill the run
            logger.warning("Failed to store test success: {}".format(sys.exc_info()[1]))
Example #2
def write_provenance_info(machine, test_compiler, test_mpilib, test_root):
    curr_commit = get_current_commit(repo=CIMEROOT)
    logging.info("Testing commit %s" % curr_commit)
    cime_model = get_model()
    logging.info("Using cime_model = %s" % cime_model)
    logging.info("Testing machine = %s" % machine.get_machine_name())
    if test_compiler is not None:
        logging.info("Testing compiler = %s" % test_compiler)
    if test_mpilib is not None:
        logging.info("Testing mpilib = %s" % test_mpilib)
    logging.info("Test root: %s" % test_root)
    logging.info("Test driver: %s" % CIME.utils.get_cime_default_driver())
    logging.info("Python version {}\n".format(sys.version))
Example #3
def _record_git_provenance(srcroot, exeroot, lid):
    """Records git provenance

    Records git status, diff and logs for main repo and all submodules.
    """
    # Save git describe
    describe_prov = os.path.join(exeroot, "GIT_DESCRIBE.{}".format(lid))
    desc = utils.get_current_commit(tag=True, repo=srcroot)
    with open(describe_prov, "w") as fd:
        fd.write(desc)

    gitroot = _find_git_root(srcroot)

    # Save HEAD
    headfile = os.path.join(gitroot, "logs", "HEAD")
    headfile_prov = os.path.join(exeroot, "GIT_LOGS_HEAD.{}".format(lid))
    if os.path.exists(headfile_prov):
        os.remove(headfile_prov)
    if os.path.exists(headfile):
        utils.safe_copy(headfile, headfile_prov, preserve_meta=False)

    # Save git submodule status
    submodule_prov = os.path.join(exeroot,
                                  "GIT_SUBMODULE_STATUS.{}".format(lid))
    subm_status = utils.get_current_submodule_status(recursive=True,
                                                     repo=srcroot)
    with open(submodule_prov, "w") as fd:
        fd.write(subm_status)

    # Git Status
    status_prov = os.path.join(exeroot, "GIT_STATUS.{}".format(lid))
    _run_git_cmd_recursively("status", srcroot, status_prov)

    # Git Diff
    diff_prov = os.path.join(exeroot, "GIT_DIFF.{}".format(lid))
    _run_git_cmd_recursively("diff", srcroot, diff_prov)

    # Git Log
    log_prov = os.path.join(exeroot, "GIT_LOG.{}".format(lid))
    cmd = "log --first-parent --pretty=oneline -n 5"
    _run_git_cmd_recursively(cmd, srcroot, log_prov)

    # Git remote
    remote_prov = os.path.join(exeroot, "GIT_REMOTE.{}".format(lid))
    _run_git_cmd_recursively("remote -v", srcroot, remote_prov)

    # Git config
    config_src = os.path.join(gitroot, "config")
    config_prov = os.path.join(exeroot, "GIT_CONFIG.{}".format(lid))
    utils.safe_copy(config_src, config_prov, preserve_meta=False)
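
For orientation, a rough standalone approximation of the git information captured above, using plain subprocess calls; the CIME helpers (utils.get_current_commit, _run_git_cmd_recursively) wrap equivalents of these and may use different flags or recurse into submodules:

import subprocess

def git(args, repo="."):
    # Run a git command in the given repository and return its stdout.
    return subprocess.run(["git", "-C", repo] + args,
                          capture_output=True, text=True, check=True).stdout

describe = git(["describe", "--tags", "--always"])  # ~ GIT_DESCRIBE
status = git(["status"])                            # ~ GIT_STATUS
log = git(["log", "--first-parent", "--pretty=oneline", "-n", "5"])  # ~ GIT_LOG
remotes = git(["remote", "-v"])                     # ~ GIT_REMOTE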
Example #4
    def set_model_version(self, model):
        version = "unknown"
        if model == "cesm":
            srcroot = self.get_value("SRCROOT")
            changelog = os.path.join(srcroot, "ChangeLog")
            expect(os.path.isfile(changelog), " No CESM ChangeLog file found")
            for line in open(changelog, "r"):
                m = re.search("Tag name: (cesm.*)$", line)
                if m is not None:
                    version = m.group(1)
                    break
        elif model == "acme":
            cimeroot = self.get_value("CIMEROOT")
            version = get_current_commit(True, cimeroot)
        self.set_value("MODEL_VERSION", version)
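
The CESM branch above scrapes the version from a ChangeLog file. A quick check of what the regex matches, on a made-up sample line:

import re

sample = "Tag name: cesm2_1_3"  # illustrative line, not from a real ChangeLog
m = re.search("Tag name: (cesm.*)$", sample)
assert m is not None and m.group(1) == "cesm2_1_3"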
Example #5
def _save_build_provenance_e3sm(case, lid):
    cimeroot = case.get_value("CIMEROOT")
    exeroot = case.get_value("EXEROOT")
    caseroot = case.get_value("CASEROOT")

    # Save git describe
    describe_prov = os.path.join(exeroot, "GIT_DESCRIBE.{}".format(lid))
    desc = get_current_commit(tag=True, repo=cimeroot)
    with open(describe_prov, "w") as fd:
        fd.write(desc)

    # Save HEAD
    headfile = os.path.join(cimeroot, ".git", "logs", "HEAD")
    headfile_prov = os.path.join(exeroot, "GIT_LOGS_HEAD.{}".format(lid))
    if os.path.exists(headfile_prov):
        os.remove(headfile_prov)
    if os.path.exists(headfile):
        copy_umask(headfile, headfile_prov)

    # Save SourceMods
    sourcemods = os.path.join(caseroot, "SourceMods")
    sourcemods_prov = os.path.join(exeroot, "SourceMods.{}.tar.gz".format(lid))
    if os.path.exists(sourcemods_prov):
        os.remove(sourcemods_prov)
    if os.path.isdir(sourcemods):
        with tarfile.open(sourcemods_prov, "w:gz") as tfd:
            tfd.add(sourcemods, arcname="SourceMods")

    # Save build env
    env_prov = os.path.join(exeroot, "build_environment.{}.txt".format(lid))
    if os.path.exists(env_prov):
        os.remove(env_prov)
    env_module = case.get_env("mach_specific")
    env_module.save_all_env_info(env_prov)

    # For all the just-created post-build provenance files, symlink a generic name
    # to them to indicate that these are the most recent or active.
    for item in ["GIT_DESCRIBE", "GIT_LOGS_HEAD", "SourceMods", "build_environment"]:
        globstr = "{}/{}.{}*".format(exeroot, item, lid)
        matches = glob.glob(globstr)
        expect(len(matches) < 2, "Multiple matches for glob {} should not have happened".format(globstr))
        if matches:
            the_match = matches[0]
            generic_name = the_match.replace(".{}".format(lid), "")
            if os.path.exists(generic_name):
                os.remove(generic_name)
            os.symlink(the_match, generic_name)
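
The symlink loop above strips the lid suffix so a stable, generic name always points at the newest provenance file. A small sketch of that renaming step, with made-up paths:

lid = "20240101_120000"  # hypothetical build lid
the_match = "/tmp/exeroot/GIT_DESCRIBE.{}".format(lid)
generic_name = the_match.replace(".{}".format(lid), "")
assert generic_name == "/tmp/exeroot/GIT_DESCRIBE"
# The loop above then removes any stale generic_name and calls
# os.symlink(the_match, generic_name).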
Example #6
    def set_model_version(self, model):
        version = "unknown"
        srcroot = self.get_value("SRCROOT")
        if model == "cesm":
            changelog = os.path.join(srcroot, "ChangeLog")
            if os.path.isfile(changelog):
                for line in open(changelog, "r"):
                    m = re.search("Tag name: (cesm.*)$", line)
                    if m is not None:
                        version = m.group(1)
                        break
        elif model == "acme":
            version = get_current_commit(True, srcroot)
        self.set_value("MODEL_VERSION", version)

        if version != "unknown":
            logger.info("%s model version found: %s" % (model, version))
        else:
            logger.warning("WARNING: No %s Model version found." % (model))
Example #8
        coupler_log_path=case.get_value("RUNDIR"))
    if newestcpllogfile is None:
        logger.warning("No cpl.log file found in directory {}".format(
            case.get_value("RUNDIR")))
    else:
        safe_copy(newestcpllogfile, os.path.join(basegen_dir, "cpl.log.gz"))

    expect(
        num_gen > 0,
        "Could not generate any hist files for case '{}', something is seriously wrong"
        .format(os.path.join(rundir, testcase)))
    #make sure permissions are open in baseline directory
    for root, _, files in os.walk(basegen_dir):
        for name in files:
            try:
                os.chmod(
                    os.path.join(root, name), stat.S_IRUSR | stat.S_IWUSR
                    | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH)
            except OSError:
                # We tried. Not worth hard failure here.
                pass

    if get_model() == "e3sm":
        bless_log = os.path.join(basegen_dir, BLESS_LOG_NAME)
        with open(bless_log, "a") as fd:
            fd.write("sha:{} date:{}\n".format(
                get_current_commit(repo=case.get_value("CIMEROOT")),
                get_timestamp(timestamp_format="%Y-%m-%d_%H:%M:%S")))

    return True, comments
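
The bless log appended above grows by one line per baseline generation, of the form "sha:<commit> date:<timestamp>". A hedged sketch of reading such a line back (the values below are made up):

import re

line = "sha:0123abc date:2024-01-01_12:00:00"  # illustrative entry
m = re.match(r"sha:(\S+) date:(\S+)", line)
assert m is not None
commit, when = m.group(1), m.group(2)
assert commit == "0123abc" and when == "2024-01-01_12:00:00"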
Example #9
def _save_prerun_timing_e3sm(case, lid):
    project = case.get_value("PROJECT", subgroup=case.get_primary_job())
    if not case.is_save_timing_dir_project(project):
        return

    timing_dir = case.get_value("SAVE_TIMING_DIR")
    if timing_dir is None or not os.path.isdir(timing_dir):
        logger.warning("SAVE_TIMING_DIR {} is not valid. E3SM requires a valid SAVE_TIMING_DIR to archive timing data.".format(timing_dir))
        return

    logger.info("Archiving timing data and associated provenance in {}.".format(timing_dir))
    rundir = case.get_value("RUNDIR")
    blddir = case.get_value("EXEROOT")
    caseroot = case.get_value("CASEROOT")
    cimeroot = case.get_value("CIMEROOT")
    base_case = case.get_value("CASE")
    full_timing_dir = os.path.join(timing_dir, "performance_archive", getpass.getuser(), base_case, lid)
    if os.path.exists(full_timing_dir):
        logger.warning("{} already exists. Skipping archive of timing data and associated provenance.".format(full_timing_dir))
        return

    try:
        os.makedirs(full_timing_dir)
    except OSError:
        logger.warning("{} cannot be created. Skipping archive of timing data and associated provenance.".format(full_timing_dir))
        return

    mach = case.get_value("MACH")
    compiler = case.get_value("COMPILER")

    # For some batch machines save queue info
    job_id = _get_batch_job_id_for_syslog(case)
    if job_id is not None:
        if mach == "mira":
            for cmd, filename in [("qstat -f", "qstatf"), ("qstat -lf %s" % job_id, "qstatf_jobid")]:
                filename = "%s.%s" % (filename, lid)
                run_cmd_no_fail(cmd, arg_stdout=filename, from_dir=full_timing_dir)
                gzip_existing_file(os.path.join(full_timing_dir, filename))
        elif mach == "theta":
            for cmd, filename in [("qstat -l --header JobID:JobName:User:Project:WallTime:QueuedTime:Score:RunTime:TimeRemaining:Nodes:State:Location:Mode:Command:Args:Procs:Queue:StartTime:attrs:Geometry", "qstatf"),
                                  ("qstat -lf %s" % job_id, "qstatf_jobid"),
                                  ("xtnodestat", "xtnodestat"),
                                  ("xtprocadmin", "xtprocadmin")]:
                filename = "%s.%s" % (filename, lid)
                run_cmd_no_fail(cmd, arg_stdout=filename, from_dir=full_timing_dir)
                gzip_existing_file(os.path.join(full_timing_dir, filename))
        elif mach in ["edison", "cori-haswell", "cori-knl"]:
            for cmd, filename in [("sinfo -a -l", "sinfol"), ("sqs -f %s" % job_id, "sqsf_jobid"),
                                  # ("sqs -f", "sqsf"),
                                  ("squeue -o '%.10i %.15P %.20j %.10u %.7a %.2t %.6D %.8C %.10M %.10l %.20S %.20V'", "squeuef"),
                                  ("squeue -t R -o '%.10i %R'", "squeues")]:
                filename = "%s.%s" % (filename, lid)
                run_cmd_no_fail(cmd, arg_stdout=filename, from_dir=full_timing_dir)
                gzip_existing_file(os.path.join(full_timing_dir, filename))
        elif mach == "titan":
            for cmd, filename in [("qstat -f %s >" % job_id, "qstatf_jobid"),
                                  ("xtnodestat >", "xtnodestat"),
                                  # ("qstat -f >", "qstatf"),
                                  # ("xtdb2proc -f", "xtdb2proc"),
                                  ("showq >", "showq")]:
                full_cmd = cmd + " " + filename
                run_cmd_no_fail(full_cmd + "." + lid, from_dir=full_timing_dir)
                gzip_existing_file(os.path.join(full_timing_dir, filename + "." + lid))

            # mdiag_reduce = os.path.join(full_timing_dir, "mdiag_reduce." + lid)
            # run_cmd_no_fail("./mdiag_reduce.csh", arg_stdout=mdiag_reduce, from_dir=os.path.join(caseroot, "Tools"))
            # gzip_existing_file(mdiag_reduce)
        elif mach == "anvil":
            for cmd, filename in [("qstat -f -1 acme >", "qstatf"),
                                  ("qstat -f %s >" % job_id, "qstatf_jobid"),
                                  ("qstat -r acme >", "qstatr")]:
                full_cmd = cmd + " " + filename
                run_cmd_no_fail(full_cmd + "." + lid, from_dir=full_timing_dir)
                gzip_existing_file(os.path.join(full_timing_dir, filename + "." + lid))

    # copy/tar SourceMods
    source_mods_dir = os.path.join(caseroot, "SourceMods")
    if os.path.isdir(source_mods_dir):
        with tarfile.open(os.path.join(full_timing_dir, "SourceMods.{}.tar.gz".format(lid)), "w:gz") as tfd:
            tfd.add(source_mods_dir, arcname="SourceMods")

    # Save various case configuration items
    case_docs = os.path.join(full_timing_dir, "CaseDocs.{}".format(lid))
    os.mkdir(case_docs)
    globs_to_copy = [
        "CaseDocs/*",
        "*.run",
        ".*.run",
        "*.xml",
        "user_nl_*",
        "*env_mach_specific*",
        "Macros*",
        "README.case",
        "Depends.{}".format(mach),
        "Depends.{}".format(compiler),
        "Depends.{}.{}".format(mach, compiler),
        "software_environment.txt"
        ]
    for glob_to_copy in globs_to_copy:
        for item in glob.glob(os.path.join(caseroot, glob_to_copy)):
            copy_umask(item, os.path.join(case_docs, "{}.{}".format(os.path.basename(item).lstrip("."), lid)))

    # Copy some items from build provenance
    blddir_globs_to_copy = [
        "GIT_LOGS_HEAD",
        "build_environment.txt"
        ]
    for blddir_glob_to_copy in blddir_globs_to_copy:
        for item in glob.glob(os.path.join(blddir, blddir_glob_to_copy)):
            copy_umask(item, os.path.join(full_timing_dir, os.path.basename(item) + "." + lid))

    # Save state of repo
    from_repo = cimeroot if os.path.exists(os.path.join(cimeroot, ".git")) else os.path.dirname(cimeroot)
    desc = get_current_commit(tag=True, repo=from_repo)
    with open(os.path.join(full_timing_dir, "GIT_DESCRIBE.{}".format(lid)), "w") as fd:
        fd.write(desc)

    # What this block does is mysterious to me (JGF)
    if job_id is not None:

        # Kill mach_syslog from previous run if one exists
        syslog_jobid_path = os.path.join(rundir, "syslog_jobid.{}".format(job_id))
        if os.path.exists(syslog_jobid_path):
            try:
                with open(syslog_jobid_path, "r") as fd:
                    syslog_jobid = int(fd.read().strip())
                os.kill(syslog_jobid, signal.SIGTERM)
            except (ValueError, OSError) as e:
                logger.warning("Failed to kill syslog: {}".format(e))
            finally:
                os.remove(syslog_jobid_path)

        # If requested, spawn a mach_syslog process to monitor job progress
        sample_interval = case.get_value("SYSLOG_N")
        if sample_interval > 0:
            archive_checkpoints = os.path.join(full_timing_dir, "checkpoints.{}".format(lid))
            os.mkdir(archive_checkpoints)
            touch("{}/e3sm.log.{}".format(rundir, lid))
            syslog_jobid = run_cmd_no_fail("./mach_syslog {:d} {} {} {} {}/timing/checkpoints {} >& /dev/null & echo $!".format(sample_interval, job_id, lid, rundir, rundir, archive_checkpoints),
                                           from_dir=os.path.join(caseroot, "Tools"))
            with open(os.path.join(rundir, "syslog_jobid.{}".format(job_id)), "w") as fd:
                fd.write("{}\n".format(syslog_jobid))
Example #10
    newestcpllogfile = case.get_latest_cpl_log(coupler_log_path=case.get_value("RUNDIR"), cplname=cplname)
    if newestcpllogfile is None:
        logger.warning("No {}.log file found in directory {}".format(cplname,case.get_value("RUNDIR")))
    else:
        safe_copy(newestcpllogfile, os.path.join(basegen_dir, "{}.log.gz".format(cplname)), preserve_meta=False)

    testname = case.get_value("TESTCASE")
    testopts = parse_test_name(case.get_value("CASEBASEID"))[1]
    testopts = [] if testopts is None else testopts
    expect(num_gen > 0 or (testname in ["PFS", "TSC"] or "B" in testopts),
           "Could not generate any hist files for case '{}', something is seriously wrong".format(os.path.join(rundir, testcase)))

    if get_model() == "e3sm":
        bless_log = os.path.join(basegen_dir, BLESS_LOG_NAME)
        with open(bless_log, "a") as fd:
            fd.write("sha:{} date:{}\n".format(get_current_commit(repo=case.get_value("CIMEROOT")),
                                               get_timestamp(timestamp_format="%Y-%m-%d_%H:%M:%S")))

    return True, comments

def generate_baseline(case, baseline_dir=None, allow_baseline_overwrite=False):
    with SharedArea():
        return _generate_baseline_impl(case, baseline_dir=baseline_dir, allow_baseline_overwrite=allow_baseline_overwrite)

def get_ts_synopsis(comments):
    r"""
    Reduce case diff comments down to a single line synopsis so that we can put
    something in the TestStatus file. It's expected that the comments provided
    to this function came from compare_baseline, not compare_tests.

    >>> get_ts_synopsis('')
Example #11
    def _generate_baseline(self):
        """
        generate a new baseline case based on the current test
        """
        with self._test_status:
            # generate baseline

            # BEGIN: modified CIME.hist_utils.generate_baseline
            rundir = self._case.get_value("RUNDIR")
            basegen_dir = os.path.join(self._case.get_value("BASELINE_ROOT"),
                                       self._case.get_value("BASEGEN_CASE"))
            testcase = self._case.get_value("CASE")

            if not os.path.isdir(basegen_dir):
                os.makedirs(basegen_dir)

            if os.path.isdir(os.path.join(basegen_dir, testcase)):
                expect(
                    False,
                    " Cowardly refusing to overwrite existing baseline directory"
                )

            comments = "Generating baselines into '{}'\n".format(basegen_dir)
            num_gen = 0

            model = 'cam'
            comments += "  generating for model '{}'\n".format(model)
            hists = _get_all_hist_files(testcase, model, rundir)
            logger.debug("mvk_hist_files: {}".format(hists))

            num_gen += len(hists)
            for hist in hists:
                basename = hist[hist.rfind(model):]
                baseline = os.path.join(basegen_dir, basename)
                if os.path.exists(baseline):
                    os.remove(baseline)

                shutil.copy(hist, baseline)
                comments += "    generating baseline '{}' from file {}\n".format(
                    baseline, hist)

            newestcpllogfile = self._case.get_latest_cpl_log(
                coupler_log_path=self._case.get_value("LOGDIR"))
            if newestcpllogfile is None:
                logger.warning(
                    "No cpl.log file found in log directory {}".format(
                        self._case.get_value("LOGDIR")))
            else:
                shutil.copyfile(newestcpllogfile,
                                os.path.join(basegen_dir, "cpl.log.gz"))

            expect(
                num_gen > 0,
                "Could not generate any hist files for case '{}', something is seriously wrong"
                .format(os.path.join(rundir, testcase)))
            # make sure permissions are open in baseline directory
            for root, _, files in os.walk(basegen_dir):
                for name in files:
                    try:
                        os.chmod(
                            os.path.join(root,
                                         name), stat.S_IRUSR | stat.S_IWUSR
                            | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH)
                    except OSError:
                        # We tried. Not worth hard failure here.
                        pass

            if get_model() == "e3sm":
                bless_log = os.path.join(basegen_dir, BLESS_LOG_NAME)
                with open(bless_log, "a") as fd:
                    fd.write("sha:{} date:{}\n".format(
                        get_current_commit(
                            repo=self._case.get_value("CIMEROOT")),
                        get_timestamp(timestamp_format="%Y-%m-%d_%H:%M:%S")))
            # END: modified CIME.hist_utils.generate_baseline

            append_testlog(comments)
            status = CIME.test_status.TEST_PASS_STATUS
            baseline_name = self._case.get_value("BASEGEN_CASE")
            self._test_status.set_status(
                "{}".format(CIME.test_status.GENERATE_PHASE),
                status,
                comments=os.path.dirname(baseline_name))
            basegen_dir = os.path.join(self._case.get_value("BASELINE_ROOT"),
                                       self._case.get_value("BASEGEN_CASE"))
            # copy latest cpl log to baseline
            # drop the date so that the name is generic
            newestcpllogfiles = self._get_latest_cpl_logs()
            for cpllog in newestcpllogfiles:
                m = re.search(r"/(cpl.*.log).*.gz", cpllog)
                if m is not None:
                    baselog = os.path.join(basegen_dir, m.group(1)) + ".gz"
                    shutil.copyfile(cpllog, os.path.join(basegen_dir, baselog))
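
The regex in the final loop above recovers the generic coupler log name from a dated, gzipped file name. A quick check with a made-up path:

import re

cpllog = "/scratch/run/cpl.log.240101-120000.gz"  # illustrative path
m = re.search(r"/(cpl.*.log).*.gz", cpllog)
assert m is not None and m.group(1) == "cpl.log"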
Example #12
                "w:gz") as tfd:
            tfd.add(source_mods_dir, arcname="SourceMods")

    # Save various case configuration items
    case_docs = os.path.join(full_timing_dir, "CaseDocs.{}".format(lid))
    os.mkdir(case_docs)

    _copy_caseroot_files(mach, compiler, caseroot, case_docs, lid)
    _copy_blddir_files(blddir, full_timing_dir, lid)
    _copy_rundir_files(rundir, full_timing_dir, lid)

    # Save state of repo
    from_repo = (srcroot if os.path.exists(os.path.join(srcroot, ".git")) else
                 os.path.dirname(srcroot))

    desc = utils.get_current_commit(tag=True, repo=from_repo)
    with open(os.path.join(full_timing_dir, "GIT_DESCRIBE.{}".format(lid)),
              "w") as fd:
        fd.write(desc)

    # What this block does is mysterious to me (JGF)
    if job_id is not None:
        _record_syslog(case, lid, job_id, caseroot, rundir, full_timing_dir)


def _record_queue_info(mach, job_id, lid, full_timing_dir):
    if mach == "theta":
        _record_anl_theta_queue(job_id, lid, full_timing_dir)
    elif mach in ["cori-haswell", "cori-knl"]:
        _record_nersc_queue(job_id, lid, full_timing_dir)
    elif mach in ["anvil", "chrysalis", "compy"]:
Example #13
    testname = case.get_value("TESTCASE")
    testopts = parse_test_name(case.get_value("CASEBASEID"))[1]
    testopts = [] if testopts is None else testopts
    expect(
        num_gen > 0 or (testname in NO_HIST_TESTS or "B" in testopts),
        "Could not generate any hist files for case '{}', something is seriously wrong".format(
            os.path.join(rundir, testcase)
        ),
    )

    if get_model() == "e3sm":
        bless_log = os.path.join(basegen_dir, BLESS_LOG_NAME)
        with open(bless_log, "a", encoding="utf-8") as fd:
            fd.write(
                "sha:{} date:{}\n".format(
                    get_current_commit(repo=case.get_value("SRCROOT")),
                    get_timestamp(timestamp_format="%Y-%m-%d_%H:%M:%S"),
                )
            )

    return True, comments


def generate_baseline(case, baseline_dir=None, allow_baseline_overwrite=False):
    with SharedArea():
        return _generate_baseline_impl(
            case,
            baseline_dir=baseline_dir,
            allow_baseline_overwrite=allow_baseline_overwrite,
        )
Example #14
    def _generate_baseline(self):
        """
        generate a new baseline case based on the current test
        """
        with self._test_status:
            # generate baseline

            # BEGIN: modified CIME.hist_utils.generate_baseline
            rundir = self._case.get_value("RUNDIR")
            basegen_dir = os.path.join(self._case.get_value("BASELINE_ROOT"),
                                       self._case.get_value("BASEGEN_CASE"))
            testcase = self._case.get_value("CASE")

            if not os.path.isdir(basegen_dir):
                os.makedirs(basegen_dir)

            if os.path.isdir(os.path.join(basegen_dir, testcase)):
                expect(False, " Cowardly refusing to overwrite existing baseline directory")

            comments = "Generating baselines into '{}'\n".format(basegen_dir)
            num_gen = 0

            model = 'cam'
            comments += "  generating for model '{}'\n".format(model)
            hists = _get_all_hist_files(testcase, model, rundir)
            logger.debug("mvk_hist_files: {}".format(hists))

            num_gen += len(hists)
            for hist in hists:
                basename = hist[hist.rfind(model):]
                baseline = os.path.join(basegen_dir, basename)
                if os.path.exists(baseline):
                    os.remove(baseline)

                shutil.copy(hist, baseline)
                comments += "    generating baseline '{}' from file {}\n".format(baseline, hist)

            newestcpllogfile = self._case.get_latest_cpl_log(coupler_log_path=self._case.get_value("LOGDIR"))
            if newestcpllogfile is None:
                logger.warning("No cpl.log file found in log directory {}".format(self._case.get_value("LOGDIR")))
            else:
                shutil.copyfile(newestcpllogfile,
                                os.path.join(basegen_dir, "cpl.log.gz"))

            expect(num_gen > 0, "Could not generate any hist files for case '{}', something is seriously wrong".format(
                os.path.join(rundir, testcase)))
            # make sure permissions are open in baseline directory
            for root, _, files in os.walk(basegen_dir):
                for name in files:
                    try:
                        os.chmod(os.path.join(root, name),
                                 stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH)
                    except OSError:
                        # We tried. Not worth hard failure here.
                        pass

            if get_model() == "e3sm":
                bless_log = os.path.join(basegen_dir, BLESS_LOG_NAME)
                with open(bless_log, "a") as fd:
                    fd.write("sha:{} date:{}\n".format(get_current_commit(repo=self._case.get_value("CIMEROOT")),
                                                       get_timestamp(timestamp_format="%Y-%m-%d_%H:%M:%S")))
            # END: modified CIME.hist_utils.generate_baseline

            append_testlog(comments)
            status = CIME.test_status.TEST_PASS_STATUS
            baseline_name = self._case.get_value("BASEGEN_CASE")
            self._test_status.set_status("{}".format(CIME.test_status.GENERATE_PHASE), status,
                                         comments=os.path.dirname(baseline_name))
            basegen_dir = os.path.join(self._case.get_value("BASELINE_ROOT"), self._case.get_value("BASEGEN_CASE"))
            # copy latest cpl log to baseline
            # drop the date so that the name is generic
            newestcpllogfiles = self._get_latest_cpl_logs()
            for cpllog in newestcpllogfiles:
                m = re.search(r"/(cpl.*.log).*.gz", cpllog)
                if m is not None:
                    baselog = os.path.join(basegen_dir, m.group(1)) + ".gz"
                    shutil.copyfile(cpllog,
                                    os.path.join(basegen_dir, baselog))
Example #15
    remote_prov = os.path.join(exeroot, "GIT_REMOTE.{}".format(lid))
    _run_git_cmd_recursively("remote -v", srcroot, remote_prov)

    # Git config
    config_src = os.path.join(srcroot, ".git", "config")
    config_prov = os.path.join(exeroot, "GIT_CONFIG.{}".format(lid))
    safe_copy(config_src, config_prov, preserve_meta=False)

def _save_build_provenance_e3sm(case, lid):
    srcroot = case.get_value("SRCROOT")
    exeroot = case.get_value("EXEROOT")
    caseroot = case.get_value("CASEROOT")

    # Save git describe
    describe_prov = os.path.join(exeroot, "GIT_DESCRIBE.{}".format(lid))
    desc = get_current_commit(tag=True, repo=srcroot)
    with open(describe_prov, "w") as fd:
        fd.write(desc)

    # Save HEAD
    headfile = os.path.join(srcroot, ".git", "logs", "HEAD")
    headfile_prov = os.path.join(exeroot, "GIT_LOGS_HEAD.{}".format(lid))
    if os.path.exists(headfile_prov):
        os.remove(headfile_prov)
    if os.path.exists(headfile):
        safe_copy(headfile, headfile_prov, preserve_meta=False)

    # Save git submodule status
    submodule_prov = os.path.join(exeroot, "GIT_SUBMODULE_STATUS.{}".format(lid))
    subm_status = get_current_submodule_status(recursive=True, repo=srcroot)
    with open(submodule_prov, "w") as fd:
Example #16
def generate_baseline(case, baseline_dir=None, allow_baseline_overwrite=False):
    """
    copy the current test output to baseline result

    case - The case containing the hist files to be copied into baselines
    baseline_dir - Optionally, specify a specific baseline dir, otherwise it will be computed from case config
    allow_baseline_overwrite must be true to generate baselines to an existing directory.

    returns (SUCCESS, comments)
    """
    rundir   = case.get_value("RUNDIR")
    if baseline_dir is None:
        baselineroot = case.get_value("BASELINE_ROOT")
        basegen_dir = os.path.join(baselineroot, case.get_value("BASEGEN_CASE"))
    else:
        basegen_dir = baseline_dir
    testcase = case.get_value("CASE")

    if not os.path.isdir(basegen_dir):
        os.makedirs(basegen_dir)

    if (os.path.isdir(os.path.join(basegen_dir,testcase)) and
        not allow_baseline_overwrite):
        expect(False, " Cowardly refusing to overwrite existing baseline directory")

    comments = "Generating baselines into '{}'\n".format(basegen_dir)
    num_gen = 0
    for model in _iter_model_file_substrs(case):
        comments += "  generating for model '{}'\n".format(model)
        hists =  _get_latest_hist_files(testcase, model, rundir)
        logger.debug("latest_files: {}".format(hists))
        num_gen += len(hists)
        for hist in hists:
            basename = hist[hist.rfind(model):]
            baseline = os.path.join(basegen_dir, basename)
            if os.path.exists(baseline):
                os.remove(baseline)

            shutil.copy(hist, baseline)
            comments += "    generating baseline '{}' from file {}\n".format(baseline, hist)

    # copy latest cpl log to baseline
    # drop the date so that the name is generic
    newestcpllogfile = case.get_latest_cpl_log(coupler_log_path=case.get_value("LOGDIR"))
    if newestcpllogfile is None:
        logger.warning("No cpl.log file found in log directory {}".format(case.get_value("LOGDIR")))
    else:
        shutil.copyfile(newestcpllogfile,
                    os.path.join(basegen_dir, "cpl.log.gz"))

    expect(num_gen > 0, "Could not generate any hist files for case '{}', something is seriously wrong".format(os.path.join(rundir, testcase)))
    #make sure permissions are open in baseline directory
    for root, _, files in os.walk(basegen_dir):
        for name in files:
            try:
                os.chmod(os.path.join(root,name), stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH)
            except OSError:
                # We tried. Not worth hard failure here.
                pass

    if get_model() == "e3sm":
        bless_log = os.path.join(basegen_dir, BLESS_LOG_NAME)
        with open(bless_log, "a") as fd:
            fd.write("sha:{} date:{}\n".format(get_current_commit(repo=case.get_value("CIMEROOT")),
                                               get_timestamp(timestamp_format="%Y-%m-%d_%H:%M:%S")))

    return True, comments
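
The chmod mask used in the permission-opening loops above (owner and group read/write, world read) is equivalent to mode 0o664; a quick check of the flag arithmetic:

import stat

mode = (stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP
        | stat.S_IROTH)
assert mode == 0o664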
Example #17
def _generate_baseline_impl(case, baseline_dir=None, allow_baseline_overwrite=False):
    """
    copy the current test output to baseline result

    case - The case containing the hist files to be copied into baselines
    baseline_dir - Optionally, specify a specific baseline dir, otherwise it will be computed from case config
    allow_baseline_overwrite must be true to generate baselines to an existing directory.

    returns (SUCCESS, comments)
    """
    rundir   = case.get_value("RUNDIR")
    ref_case = case.get_value("RUN_REFCASE")
    if baseline_dir is None:
        baselineroot = case.get_value("BASELINE_ROOT")
        basegen_dir = os.path.join(baselineroot, case.get_value("BASEGEN_CASE"))
    else:
        basegen_dir = baseline_dir
    testcase = case.get_value("CASE")
    archive = case.get_env('archive')

    if not os.path.isdir(basegen_dir):
        os.makedirs(basegen_dir)

    if (os.path.isdir(os.path.join(basegen_dir,testcase)) and
        not allow_baseline_overwrite):
        expect(False, " Cowardly refusing to overwrite existing baseline directory")

    comments = "Generating baselines into '{}'\n".format(basegen_dir)
    num_gen = 0
    for model in _iter_model_file_substrs(case):
        comments += "  generating for model '{}'\n".format(model)
        if model == 'cpl':
            file_extensions = archive.get_hist_file_extensions(archive.get_entry('drv'))
        else:
            file_extensions = archive.get_hist_file_extensions(archive.get_entry(model))
        hists =  _get_latest_hist_files(model, rundir, file_extensions, ref_case=ref_case)
        logger.debug("latest_files: {}".format(hists))
        num_gen += len(hists)
        for hist in hists:
            basename = hist[hist.rfind(model):]
            baseline = os.path.join(basegen_dir, basename)
            if os.path.exists(baseline):
                os.remove(baseline)

            safe_copy(hist, baseline, preserve_meta=False)
            comments += "    generating baseline '{}' from file {}\n".format(baseline, hist)

    # copy latest cpl log to baseline
    # drop the date so that the name is generic
    if case.get_value("COMP_INTERFACE") == "nuopc":
        cplname = "med"
    else:
        cplname = "cpl"

    newestcpllogfile = case.get_latest_cpl_log(coupler_log_path=case.get_value("RUNDIR"), cplname=cplname)
    if newestcpllogfile is None:
        logger.warning("No {}.log file found in directory {}".format(cplname,case.get_value("RUNDIR")))
    else:
        safe_copy(newestcpllogfile, os.path.join(basegen_dir, "{}.log.gz".format(cplname)), preserve_meta=False)

    testname = case.get_value("TESTCASE")
    expect(num_gen > 0 or testname == "PFS", "Could not generate any hist files for case '{}', something is seriously wrong".format(os.path.join(rundir, testcase)))

    if get_model() == "e3sm":
        bless_log = os.path.join(basegen_dir, BLESS_LOG_NAME)
        with open(bless_log, "a") as fd:
            fd.write("sha:{} date:{}\n".format(get_current_commit(repo=case.get_value("CIMEROOT")),
                                               get_timestamp(timestamp_format="%Y-%m-%d_%H:%M:%S")))

    return True, comments
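
Across all of these examples, get_current_commit is called either with just a repo path (to record the commit identifier) or with tag=True (to record git-describe style output). A hedged sketch of those call patterns, assuming get_current_commit is importable from CIME.utils as the other CIME.utils usage here suggests; the repo path is a placeholder:

from CIME.utils import get_current_commit

repo = "/path/to/srcroot"  # placeholder repository path
sha = get_current_commit(repo=repo)             # plain commit identifier
desc = get_current_commit(tag=True, repo=repo)  # git-describe style output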