def _compare_baseline(self): """ Compare baselines in the pergro test sense. That is, compare PGE from the test simulation with the baseline cloud """ with self._test_status: self._test_status.set_status(CIME.test_status.BASELINE_PHASE, CIME.test_status.TEST_FAIL_STATUS) logger.debug("PGN_INFO:BASELINE COMPARISON STARTS") run_dir = self._case.get_value("RUNDIR") case_name = self._case.get_value("CASE") base_dir = os.path.join(self._case.get_value("BASELINE_ROOT"), self._case.get_value("BASECMP_CASE")) var_list = self.get_var_list() test_name = "{}".format(case_name.split('.')[-1]) evv_config = { test_name: { "module": os.path.join(evv_lib_dir, "extensions", "pg.py"), "test-case": case_name, "test-name": "Test", "test-dir": run_dir, "ref-name": "Baseline", "ref-dir": base_dir, "variables": var_list, "perturbations": PERTURBATIONS, "pge-cld": FCLD_NC, "ninit": NUMBER_INITIAL_CONDITIONS, "init-file-template": INIT_COND_FILE_TEMPLATE, "instance-file-template": INSTANCE_FILE_TEMPLATE, } } json_file = os.path.join(run_dir, '.'.join([case_name, 'json'])) with open(json_file, 'w') as config_file: json.dump(evv_config, config_file, indent=4) evv_out_dir = os.path.join(run_dir, '.'.join([case_name, 'evv'])) evv(['-e', json_file, '-o', evv_out_dir]) with open(os.path.join(evv_out_dir, 'index.json'), 'r') as evv_f: evv_status = json.load(evv_f) for evv_elem in evv_status['Data']['Elements']: if evv_elem['Type'] == 'ValSummary' \ and evv_elem['TableTitle'] == 'Perturbation growth test': if evv_elem['Data'][test_name]['']['Test status'].lower( ) == 'pass': self._test_status.set_status( CIME.test_status.BASELINE_PHASE, CIME.test_status.TEST_PASS_STATUS) break
def _compare_baseline(self):
    """
    Compare the test ensemble against the baseline ensemble with EVV's
    Kolmogorov-Smirnov (ks) extension.
    """
    with self._test_status:
        if int(self._case.get_value("RESUBMIT")) > 0:
            # This is here because the comparison is run for each submission
            # and we only want to compare once the whole run is finished. We
            # need to return a pass here to continue the submission process.
            self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                         CIME.test_status.TEST_PASS_STATUS)
            return

        self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                     CIME.test_status.TEST_FAIL_STATUS)

        run_dir = self._case.get_value("RUNDIR")
        case_name = self._case.get_value("CASE")
        basecmp_case = self._case.get_value("BASECMP_CASE")
        base_dir = os.path.join(self._case.get_value("BASELINE_ROOT"),
                                basecmp_case)

        test_name = "{}".format(case_name.split('.')[-1])
        evv_config = {
            test_name: {
                "module": os.path.join(evv_lib_dir, "extensions", "ks.py"),
                "case1": "Test",
                "dir1": run_dir,
                "case2": "Baseline",
                "dir2": base_dir,
                "ninst": NINST,  # ensemble size (module-level constant)
                "critical": 13
            }
        }

        json_file = os.path.join(run_dir, '.'.join([case_name, 'json']))
        with open(json_file, 'w') as config_file:
            json.dump(evv_config, config_file, indent=4)

        evv_out_dir = os.path.join(run_dir, '.'.join([case_name, 'evv']))
        evv(['-e', json_file, '-o', evv_out_dir])

        with open(os.path.join(evv_out_dir, 'index.json'), 'r') as evv_f:
            evv_status = json.load(evv_f)

        for evv_elem in evv_status['Data']['Elements']:
            if evv_elem['Type'] == 'ValSummary':
                if evv_elem['TableTitle'] == 'Kolmogorov-Smirnov':
                    if evv_elem['Data'][test_name]['']['Ensembles'] == 'identical':
                        self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                                     CIME.test_status.TEST_PASS_STATUS)
                    break
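# For reference, the <case_name>.json written above is simply evv_config
# serialized by json.dump. With hypothetical paths and values (case name,
# directories, and ensemble size below are placeholders) it would look
# roughly like:
#
#   {
#       "mycase": {
#           "module": "/path/to/evv/extensions/ks.py",
#           "case1": "Test",
#           "dir1": "/scratch/mycase/run",
#           "case2": "Baseline",
#           "dir2": "/baselines/mycase.base",
#           "ninst": 30,
#           "critical": 13
#       }
#   }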
def _compare_baseline(self):
    """
    Compare the test case against the baseline with EVV's time step
    convergence (tsc) extension.
    """
    with self._test_status as ts:
        ts.set_status(CIME.test_status.BASELINE_PHASE,
                      CIME.test_status.TEST_FAIL_STATUS)

        run_dir = self._case.get_value("RUNDIR")
        case_name = self._case.get_value("CASE")
        base_dir = os.path.join(self._case.get_value("BASELINE_ROOT"),
                                self._case.get_value("BASECMP_CASE"))

        test_name = "{}".format(case_name.split('.')[-1])
        evv_config = {
            test_name: {
                "module": os.path.join(evv_lib_dir, "extensions", "tsc.py"),
                "test-case": case_name,
                "test-dir": run_dir,
                "ref-case": "Baseline",
                "ref-dir": base_dir,
                "time-slice": [OUT_FREQ, SIM_LENGTH],
                "inspect-times": INSPECT_AT,
                "variables": VAR_LIST,
                "p-threshold": P_THRESHOLD,
            }
        }

        json_file = os.path.join(run_dir, '.'.join([case_name, 'json']))
        with open(json_file, 'w') as config_file:
            json.dump(evv_config, config_file, indent=4)

        evv_out_dir = os.path.join(run_dir, '.'.join([case_name, 'evv']))
        evv(['-e', json_file, '-o', evv_out_dir])

        with open(os.path.join(evv_out_dir, 'index.json'), 'r') as evv_f:
            evv_status = json.load(evv_f)

        for evv_elem in evv_status['Data']['Elements']:
            if evv_elem['Type'] == 'ValSummary' \
                    and evv_elem['TableTitle'] == 'Time step convergence test':
                if evv_elem['Data'][test_name]['']['Test status'].lower() == 'pass':
                    self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                                 CIME.test_status.TEST_PASS_STATUS)
                break
def _compare_baseline(self):
    """
    Compare the test case against the baseline with EVV's time step
    convergence (tsc) extension and record the EVV results in the test log.
    """
    with self._test_status as ts:
        ts.set_status(CIME.test_status.BASELINE_PHASE,
                      CIME.test_status.TEST_FAIL_STATUS)

        run_dir = self._case.get_value("RUNDIR")
        case_name = self._case.get_value("CASE")
        base_dir = os.path.join(self._case.get_value("BASELINE_ROOT"),
                                self._case.get_value("BASECMP_CASE"))

        test_name = "{}".format(case_name.split('.')[-1])
        evv_config = {
            test_name: {
                "module": os.path.join(evv_lib_dir, "extensions", "tsc.py"),
                "test-case": case_name,
                "test-dir": run_dir,
                "ref-case": "Baseline",
                "ref-dir": base_dir,
                "time-slice": [OUT_FREQ, SIM_LENGTH],
                "inspect-times": INSPECT_AT,
                "variables": VAR_LIST,
                "p-threshold": P_THRESHOLD,
            }
        }

        json_file = os.path.join(run_dir, '.'.join([case_name, 'json']))
        with open(json_file, 'w') as config_file:
            json.dump(evv_config, config_file, indent=4)

        evv_out_dir = os.path.join(run_dir, '.'.join([case_name, 'evv']))
        evv(['-e', json_file, '-o', evv_out_dir])

        with open(os.path.join(evv_out_dir, 'index.json'), 'r') as evv_f:
            evv_status = json.load(evv_f)

        comments = ""
        for evv_elem in evv_status['Data']['Elements']:
            if evv_elem['Type'] == 'ValSummary' \
                    and evv_elem['TableTitle'] == 'Time step convergence test':
                comments = "; ".join("{}: {}".format(key, val)
                                     for key, val in evv_elem['Data'][test_name][''].items())
                if evv_elem['Data'][test_name]['']['Test status'].lower() == 'pass':
                    self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                                 CIME.test_status.TEST_PASS_STATUS)
                break

        status = self._test_status.get_status(CIME.test_status.BASELINE_PHASE)
        mach_name = self._case.get_value("MACH")
        mach_obj = Machines(machine=mach_name)
        htmlroot = CIME.utils.get_htmlroot(mach_obj)
        urlroot = CIME.utils.get_urlroot(mach_obj)
        if htmlroot is not None:
            with CIME.utils.SharedArea():
                dir_util.copy_tree(evv_out_dir,
                                   os.path.join(htmlroot, 'evv', case_name),
                                   preserve_mode=False)
            if urlroot is None:
                urlroot = "[{}_URL]".format(mach_name.capitalize())
            viewing = "{}/evv/{}/index.html".format(urlroot, case_name)
        else:
            viewing = "{}\n" \
                      "    EVV viewing instructions can be found at: " \
                      "        https://github.com/E3SM-Project/E3SM/blob/master/cime/scripts/" \
                      "climate_reproducibility/README.md#test-passfail-and-extended-output" \
                      "".format(evv_out_dir)

        comments = "{} {} for test '{}'.\n" \
                   "    {}\n" \
                   "    EVV results can be viewed at:\n" \
                   "        {}".format(CIME.test_status.BASELINE_PHASE, status, test_name,
                                       comments, viewing)

        CIME.utils.append_testlog(comments, self._orig_caseroot)
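# With an htmlroot configured for the machine, the block above copies the EVV
# output to <htmlroot>/evv/<case_name>, and the format string appended to the
# test log renders roughly like this (case name, status, and URL below are
# hypothetical):
#
#   BASELINE PASS for test 'mytest'.
#       Test status: pass; ...
#       EVV results can be viewed at:
#           <urlroot>/evv/<case_name>/index.html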
def _compare_baseline(self):
    """
    Compare the test ensemble against the baseline ensemble with EVV's
    Kolmogorov-Smirnov (ks) extension and record the EVV results in the test log.
    """
    with self._test_status:
        if int(self._case.get_value("RESUBMIT")) > 0:
            # This is here because the comparison is run for each submission
            # and we only want to compare once the whole run is finished. We
            # need to return a pass here to continue the submission process.
            self._test_status.set_status(
                CIME.test_status.BASELINE_PHASE, CIME.test_status.TEST_PASS_STATUS
            )
            return

        self._test_status.set_status(
            CIME.test_status.BASELINE_PHASE, CIME.test_status.TEST_FAIL_STATUS
        )

        run_dir = self._case.get_value("RUNDIR")
        case_name = self._case.get_value("CASE")
        base_dir = os.path.join(
            self._case.get_value("BASELINE_ROOT"),
            self._case.get_value("BASECMP_CASE"),
        )

        test_name = "{}".format(case_name.split(".")[-1])
        evv_config = {
            test_name: {
                "module": os.path.join(evv_lib_dir, "extensions", "ks.py"),
                "test-case": "Test",
                "test-dir": run_dir,
                "ref-case": "Baseline",
                "ref-dir": base_dir,
                "var-set": "default",
                "ninst": NINST,
                "critical": 13,
                "component": self.component,
            }
        }

        json_file = os.path.join(run_dir, ".".join([case_name, "json"]))
        with open(json_file, "w") as config_file:
            json.dump(evv_config, config_file, indent=4)

        evv_out_dir = os.path.join(run_dir, ".".join([case_name, "evv"]))
        evv(["-e", json_file, "-o", evv_out_dir])

        with open(os.path.join(evv_out_dir, "index.json")) as evv_f:
            evv_status = json.load(evv_f)

        comments = ""
        for evv_ele in evv_status["Page"]["elements"]:
            if "Table" in evv_ele:
                comments = "; ".join(
                    "{}: {}".format(key, val[0])
                    for key, val in evv_ele["Table"]["data"].items()
                )
                if evv_ele["Table"]["data"]["Test status"][0].lower() == "pass":
                    self._test_status.set_status(
                        CIME.test_status.BASELINE_PHASE,
                        CIME.test_status.TEST_PASS_STATUS,
                    )
                break

        status = self._test_status.get_status(CIME.test_status.BASELINE_PHASE)
        mach_name = self._case.get_value("MACH")
        mach_obj = Machines(machine=mach_name)
        htmlroot = CIME.utils.get_htmlroot(mach_obj)
        urlroot = CIME.utils.get_urlroot(mach_obj)
        if htmlroot is not None:
            with CIME.utils.SharedArea():
                dir_util.copy_tree(
                    evv_out_dir,
                    os.path.join(htmlroot, "evv", case_name),
                    preserve_mode=False,
                )
            if urlroot is None:
                urlroot = "[{}_URL]".format(mach_name.capitalize())
            viewing = "{}/evv/{}/index.html".format(urlroot, case_name)
        else:
            viewing = (
                "{}\n"
                "    EVV viewing instructions can be found at: "
                "        https://github.com/E3SM-Project/E3SM/blob/master/cime/scripts/"
                "climate_reproducibility/README.md#test-passfail-and-extended-output"
                "".format(evv_out_dir)
            )

        comments = (
            "{} {} for test '{}'.\n"
            "    {}\n"
            "    EVV results can be viewed at:\n"
            "        {}".format(
                CIME.test_status.BASELINE_PHASE,
                status,
                test_name,
                comments,
                viewing,
            )
        )

        CIME.utils.append_testlog(comments, self._orig_caseroot)
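# This variant reads EVV's newer single-page output instead of the older
# 'Data'/'Elements' layout. Based only on the keys the loop above touches
# (an illustrative assumption, not the full EVV schema), the relevant portion
# of index.json looks roughly like:
#
#   {
#       "Page": {
#           "elements": [
#               {"Table": {"data": {"Test status": ["Pass"], ...}}}
#           ]
#       }
#   }
#
# where each table column maps to a list of values and other columns sit
# alongside "Test status".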
def _compare_baseline(self): """ Compare baselines in the pergro test sense. That is, compare PGE from the test simulation with the baseline cloud """ with self._test_status: self._test_status.set_status(CIME.test_status.BASELINE_PHASE, CIME.test_status.TEST_FAIL_STATUS) logger.debug("PGN_INFO:BASELINE COMPARISON STARTS") run_dir = self._case.get_value("RUNDIR") case_name = self._case.get_value("CASE") base_dir = os.path.join(self._case.get_value("BASELINE_ROOT"), self._case.get_value("BASECMP_CASE")) var_list = self.get_var_list() test_name = "{}".format(case_name.split('.')[-1]) evv_config = { test_name: { "module": os.path.join(evv_lib_dir, "extensions", "pg.py"), "test-case": case_name, "test-name": "Test", "test-dir": run_dir, "ref-name": "Baseline", "ref-dir": base_dir, "variables": var_list, "perturbations": PERTURBATIONS, "pge-cld": FCLD_NC, "ninit": NUMBER_INITIAL_CONDITIONS, "init-file-template": INIT_COND_FILE_TEMPLATE, "instance-file-template": INSTANCE_FILE_TEMPLATE, "init-model": "cam", "component": self.atmmod, } } json_file = os.path.join(run_dir, '.'.join([case_name, 'json'])) with open(json_file, 'w') as config_file: json.dump(evv_config, config_file, indent=4) evv_out_dir = os.path.join(run_dir, '.'.join([case_name, 'evv'])) evv(['-e', json_file, '-o', evv_out_dir]) with open(os.path.join(evv_out_dir, 'index.json'), 'r') as evv_f: evv_status = json.load(evv_f) comments = "" for evv_elem in evv_status['Data']['Elements']: if evv_elem['Type'] == 'ValSummary' \ and evv_elem['TableTitle'] == 'Perturbation growth test': comments = "; ".join("{}: {}".format(key, val) for key, val in evv_elem['Data'] [test_name][''].items()) if evv_elem['Data'][test_name]['']['Test status'].lower( ) == 'pass': self._test_status.set_status( CIME.test_status.BASELINE_PHASE, CIME.test_status.TEST_PASS_STATUS) break status = self._test_status.get_status( CIME.test_status.BASELINE_PHASE) mach_name = self._case.get_value("MACH") mach_obj = Machines(machine=mach_name) htmlroot = CIME.utils.get_htmlroot(mach_obj) urlroot = CIME.utils.get_urlroot(mach_obj) if htmlroot is not None: with CIME.utils.SharedArea(): dir_util.copy_tree(evv_out_dir, os.path.join(htmlroot, 'evv', case_name), preserve_mode=False) if urlroot is None: urlroot = "[{}_URL]".format(mach_name.capitalize()) viewing = "{}/evv/{}/index.html".format(urlroot, case_name) else: viewing = "{}\n" \ " EVV viewing instructions can be found at: " \ " https://github.com/E3SM-Project/E3SM/blob/master/cime/scripts/" \ "climate_reproducibility/README.md#test-passfail-and-extended-output" \ "".format(evv_out_dir) comments = "{} {} for test '{}'.\n" \ " {}\n" \ " EVV results can be viewed at:\n" \ " {}".format(CIME.test_status.BASELINE_PHASE, status, test_name, comments, viewing) CIME.utils.append_testlog(comments, self._orig_caseroot)
def _compare_baseline(self):
    """
    Compare the test case against the baseline with EVV's time step
    convergence (tsc) extension and record the EVV results in the test log.
    """
    with self._test_status as ts:
        ts.set_status(CIME.test_status.BASELINE_PHASE,
                      CIME.test_status.TEST_FAIL_STATUS)

        run_dir = self._case.get_value("RUNDIR")
        case_name = self._case.get_value("CASE")
        base_dir = os.path.join(
            self._case.get_value("BASELINE_ROOT"),
            self._case.get_value("BASECMP_CASE"),
        )

        test_name = "{}".format(case_name.split(".")[-1])
        evv_config = {
            test_name: {
                "module": os.path.join(evv_lib_dir, "extensions", "tsc.py"),
                "test-case": case_name,
                "test-dir": run_dir,
                "ref-case": "Baseline",
                "ref-dir": base_dir,
                "time-slice": [OUT_FREQ, SIM_LENGTH],
                "inspect-times": INSPECT_AT,
                "variables": VAR_LIST,
                "p-threshold": P_THRESHOLD,
                "component": self.atmmod,
            }
        }

        json_file = os.path.join(run_dir, ".".join([case_name, "json"]))
        with open(json_file, "w") as config_file:
            json.dump(evv_config, config_file, indent=4)

        evv_out_dir = os.path.join(run_dir, ".".join([case_name, "evv"]))
        evv(["-e", json_file, "-o", evv_out_dir])

        with open(os.path.join(evv_out_dir, "index.json"), "r") as evv_f:
            evv_status = json.load(evv_f)

        comments = ""
        for evv_ele in evv_status["Page"]["elements"]:
            if "Table" in evv_ele:
                comments = "; ".join(
                    "{}: {}".format(key, val[0])
                    for key, val in evv_ele["Table"]["data"].items()
                )
                if evv_ele["Table"]["data"]["Test status"][0].lower() == "pass":
                    self._test_status.set_status(
                        CIME.test_status.BASELINE_PHASE,
                        CIME.test_status.TEST_PASS_STATUS,
                    )
                break

        status = self._test_status.get_status(CIME.test_status.BASELINE_PHASE)
        mach_name = self._case.get_value("MACH")
        mach_obj = Machines(machine=mach_name)
        htmlroot = CIME.utils.get_htmlroot(mach_obj)
        urlroot = CIME.utils.get_urlroot(mach_obj)
        if htmlroot is not None:
            with CIME.utils.SharedArea():
                dir_util.copy_tree(
                    evv_out_dir,
                    os.path.join(htmlroot, "evv", case_name),
                    preserve_mode=False,
                )
            if urlroot is None:
                urlroot = "[{}_URL]".format(mach_name.capitalize())
            viewing = "{}/evv/{}/index.html".format(urlroot, case_name)
        else:
            viewing = (
                "{}\n"
                "    EVV viewing instructions can be found at: "
                "        https://github.com/E3SM-Project/E3SM/blob/master/cime/scripts/"
                "climate_reproducibility/README.md#test-passfail-and-extended-output"
                "".format(evv_out_dir)
            )

        comments = (
            "{} {} for test '{}'.\n"
            "    {}\n"
            "    EVV results can be viewed at:\n"
            "        {}".format(
                CIME.test_status.BASELINE_PHASE,
                status,
                test_name,
                comments,
                viewing,
            )
        )

        CIME.utils.append_testlog(comments, self._orig_caseroot)
def _compare_baseline(self):
    """
    Compare the test ensemble against the baseline ensemble with EVV's
    Kolmogorov-Smirnov (ks) extension and record the EVV results in the test log.
    """
    with self._test_status:
        if int(self._case.get_value("RESUBMIT")) > 0:
            # This is here because the comparison is run for each submission
            # and we only want to compare once the whole run is finished. We
            # need to return a pass here to continue the submission process.
            self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                         CIME.test_status.TEST_PASS_STATUS)
            return

        self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                     CIME.test_status.TEST_FAIL_STATUS)

        run_dir = self._case.get_value("RUNDIR")
        case_name = self._case.get_value("CASE")
        base_dir = os.path.join(self._case.get_value("BASELINE_ROOT"),
                                self._case.get_value("BASECMP_CASE"))

        test_name = "{}".format(case_name.split('.')[-1])
        evv_config = {
            test_name: {
                "module": os.path.join(evv_lib_dir, "extensions", "ks.py"),
                "test-case": "Test",
                "test-dir": run_dir,
                "ref-case": "Baseline",
                "ref-dir": base_dir,
                "var-set": "default",
                "ninst": NINST,
                "critical": 13
            }
        }

        json_file = os.path.join(run_dir, '.'.join([case_name, 'json']))
        with open(json_file, 'w') as config_file:
            json.dump(evv_config, config_file, indent=4)

        evv_out_dir = os.path.join(run_dir, '.'.join([case_name, 'evv']))
        evv(['-e', json_file, '-o', evv_out_dir])

        with open(os.path.join(evv_out_dir, 'index.json')) as evv_f:
            evv_status = json.load(evv_f)

        comments = ""
        for evv_elem in evv_status['Data']['Elements']:
            if evv_elem['Type'] == 'ValSummary' \
                    and evv_elem['TableTitle'] == 'Kolmogorov-Smirnov test':
                comments = "; ".join("{}: {}".format(key, val)
                                     for key, val in evv_elem['Data'][test_name][''].items())
                if evv_elem['Data'][test_name]['']['Test status'].lower() == 'pass':
                    self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                                 CIME.test_status.TEST_PASS_STATUS)
                break

        status = self._test_status.get_status(CIME.test_status.BASELINE_PHASE)
        mach_name = self._case.get_value("MACH")
        mach_obj = Machines(machine=mach_name)
        htmlroot = CIME.utils.get_htmlroot(mach_obj)
        urlroot = CIME.utils.get_urlroot(mach_obj)
        if htmlroot is not None:
            with CIME.utils.SharedArea():
                dir_util.copy_tree(evv_out_dir,
                                   os.path.join(htmlroot, 'evv', case_name),
                                   preserve_mode=False)
            if urlroot is None:
                urlroot = "[{}_URL]".format(mach_name.capitalize())
            viewing = "{}/evv/{}/index.html".format(urlroot, case_name)
        else:
            viewing = "{}\n" \
                      "    EVV viewing instructions can be found at: " \
                      "        https://github.com/E3SM-Project/E3SM/blob/master/cime/scripts/" \
                      "climate_reproducibility/README.md#test-passfail-and-extended-output" \
                      "".format(evv_out_dir)

        comments = "{} {} for test '{}'.\n" \
                   "    {}\n" \
                   "    EVV results can be viewed at:\n" \
                   "        {}".format(CIME.test_status.BASELINE_PHASE, status, test_name,
                                       comments, viewing)

        CIME.utils.append_testlog(comments, self._orig_caseroot)
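# Because every variant leaves its JSON configuration in the run directory, the
# comparison can also be re-run by hand after the test completes, using the same
# evv entry point imported at the top of the module and pointing at the saved
# file and a fresh output directory (paths below are hypothetical):
#
#   evv(['-e', '/scratch/mycase/run/mycase.json',
#        '-o', '/scratch/mycase/run/mycase.evv.rerun'])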