Example #1
    def merge_cleanup_final(self):

        script_dir = self.config_prep['script_dir']

        # start with script_dir; separate tar files for CPU* and TRAINOPT*
        prefix = (f"CPU")
        util.compress_files(+1, script_dir, f"{prefix}*", prefix, "")

        prefix = (f"{TRAINOPT_STRING}")
        util.compress_files(+1, script_dir, f"{prefix}*", prefix, "")
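
All four examples lean on util.compress_files, which is not shown in this excerpt. Below is a minimal standalone sketch of what such a helper could do, assuming a positive first argument means "archive the matched files and remove the originals"; the name compress_files_sketch, its signature, and that flag convention are illustrative guesses, not the actual submit_batch util API.

    import glob
    import os
    import tarfile

    def compress_files_sketch(flag, directory, wildcard, tar_prefix, suffix_remove=""):
        # Illustrative stand-in for util.compress_files (assumed behavior):
        # for flag > 0, bundle every file in <directory> matching <wildcard>
        # into <tar_prefix>.tar.gz and delete the originals.  flag < 0
        # (unpacking) and the suffix_remove argument are not sketched here.
        if flag < 0:
            raise NotImplementedError("unpacking is not part of this sketch")

        file_list = sorted(glob.glob(os.path.join(directory, wildcard)))
        if len(file_list) == 0:
            return

        tar_file = os.path.join(directory, f"{tar_prefix}.tar.gz")
        with tarfile.open(tar_file, "w:gz") as tar:
            for f in file_list:
                tar.add(f, arcname=os.path.basename(f))

        for f in file_list:
            os.remove(f)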
Example #2
    def merge_cleanup_final(self):
        # every makeDataFiles job succeeded, so here we simply compress output.

        submit_info_yaml = self.config_prep['submit_info_yaml']
        output_dir = self.config_prep['output_dir']
        script_dir = submit_info_yaml['SCRIPT_DIR']
        cwd = submit_info_yaml['CWD']
        output_format = submit_info_yaml['OUTPUT_FORMAT']
        isfmt_snana = (output_format == OUTPUT_FORMAT_SNANA)
        isfmt_lsst_alert = (output_format == OUTPUT_FORMAT_LSST_ALERTS)
        msgerr = []

        if isfmt_snana:
            command_list = [
                'makeDataFiles.sh', '--outdir_snana', output_dir, '--merge'
            ]
            ret = subprocess.run(command_list, capture_output=False, text=True)

        elif isfmt_lsst_alert:
            wildcard_base = f"{BASE_PREFIX}*.csv.gz"

            wildcard = f"{script_dir}/{wildcard_base}"
            combined_file = f"{output_dir}/ALERTS_TRUTH.csv.gz"
            util.combine_csv_files(wildcard, combined_file, True)

            # nothing left to compress after combining the csv files

        else:
            msgerr.append(f"Unknown format '{output_format}")
            util.log_assert(False, msgerr)  # just abort, no done stamp

        wildcard_list = [
            'MAKEDATA',
            'CPU',
        ]
        for w in wildcard_list:
            wstar = f"{w}*"
            tmp_list = glob.glob1(script_dir, wstar)
            if len(tmp_list) == 0: continue
            print(f"\t Compress {wstar}")
            sys.stdout.flush()
            util.compress_files(+1, script_dir, wstar, w, "")

        # - - - -
        # tar up entire script dir
        util.compress_subdir(+1, script_dir)
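
Example #2 also calls util.combine_csv_files to merge the per-job ALERTS truth tables before anything is tarred. A hedged sketch of that step is below: it assumes the helper concatenates the gzipped csv files matching the wildcard into one gzipped csv, keeping only the header row of the first input; the name combine_csv_files_sketch and the meaning of the boolean third argument (gzip the output) are assumptions.

    import glob
    import gzip

    def combine_csv_files_sketch(wildcard, combined_file, gzip_output=True):
        # Illustrative stand-in for util.combine_csv_files (assumed behavior):
        # concatenate every csv.gz file matching <wildcard> into a single
        # output file, keeping only the header row of the first input.
        file_list = sorted(glob.glob(wildcard))
        if len(file_list) == 0:
            return

        open_output = gzip.open if gzip_output else open
        with open_output(combined_file, "wt") as fout:
            for i, fname in enumerate(file_list):
                with gzip.open(fname, "rt") as fin:
                    lines = fin.readlines()
                fout.writelines(lines if i == 0 else lines[1:])  # drop repeated headers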
Example #3
    def merge_cleanup_final(self):
        output_dir       = self.config_prep['output_dir']
        submit_info_yaml = self.config_prep['submit_info_yaml']
        script_dir       = submit_info_yaml['SCRIPT_DIR']
        jobfile_wildcard = submit_info_yaml['JOBFILE_WILDCARD']
        script_subdir    = SUBDIR_SCRIPTS_WFIT

        self.make_wfit_summary()

        logging.info(f"  wfit cleanup: compress {JOB_SUFFIX_TAR_LIST}")
        for suffix in JOB_SUFFIX_TAR_LIST :
            wildcard = (f"{jobfile_wildcard}*.{suffix}") 
            util.compress_files(+1, script_dir, wildcard, suffix, "" )

        logging.info("")

        self.merge_cleanup_script_dir()
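
JOB_SUFFIX_TAR_LIST is not defined in this excerpt; assuming it names per-job file suffixes (e.g. LOG, DONE, YAML), the loop in Example #3 produces one tarball per suffix. A standalone sketch of the same pattern using the standard tarfile module is below; the suffix list and function name are chosen only for illustration.

    import glob
    import os
    import tarfile

    JOB_SUFFIX_TAR_LIST = ['LOG', 'DONE', 'YAML']   # assumed contents, for illustration

    def tar_jobfiles_by_suffix(script_dir, jobfile_wildcard):
        # Mirror the Example #3 loop: bundle <jobfile_wildcard>*.<suffix> into
        # <suffix>.tar.gz inside script_dir, then remove the originals.
        for suffix in JOB_SUFFIX_TAR_LIST:
            pattern = os.path.join(script_dir, f"{jobfile_wildcard}*.{suffix}")
            file_list = sorted(glob.glob(pattern))
            if len(file_list) == 0:
                continue
            tar_file = os.path.join(script_dir, f"{suffix}.tar.gz")
            with tarfile.open(tar_file, "w:gz") as tar:
                for f in file_list:
                    tar.add(f, arcname=os.path.basename(f))
            for f in file_list:
                os.remove(f)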
Example #4
    def merge_cleanup_final(self):
        # every job succeeded, so here we simply compress output.

        submit_info_yaml = self.config_prep['submit_info_yaml']
        output_dir = self.config_prep['output_dir']
        script_dir = submit_info_yaml['SCRIPT_DIR']

        wildcard_list = ['TRAINOPT', 'CPU', 'CALIB_SHIFT']
        for w in wildcard_list:
            wstar = f"{w}*"
            tmp_list = glob.glob1(script_dir, wstar)
            if len(tmp_list) == 0: continue
            print(f"\t Compress {wstar}")
            util.compress_files(+1, script_dir, wstar, w, "")

        # - - - -
        # tar up entire script dir
        util.compress_subdir(+1, script_dir)
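
Examples #2 and #4 finish by calling util.compress_subdir on the whole script directory. Below is a minimal sketch of what that final step could look like, assuming a positive flag means "replace the directory with a single tarball"; the name compress_subdir_sketch and that convention are assumptions, not the actual helper.

    import os
    import shutil
    import tarfile

    def compress_subdir_sketch(flag, subdir):
        # Illustrative stand-in for util.compress_subdir (assumed behavior):
        # for flag > 0, write <subdir>.tar.gz next to the directory and then
        # remove the directory, so only the tarball survives the cleanup.
        if flag < 0:
            raise NotImplementedError("unpacking is not part of this sketch")

        subdir = subdir.rstrip('/')
        tar_file = f"{subdir}.tar.gz"
        with tarfile.open(tar_file, "w:gz") as tar:
            tar.add(subdir, arcname=os.path.basename(subdir))
        shutil.rmtree(subdir)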