def main_method(config): """main method for processing spectra. Parameters ---------- config : :obj:`Config` Configuration object Returns ------- int 0 on success """ # initialize logger init_logger("process_spectra", config.logdir, config.log_level) start_message = "Running process_spectra {}".format(VERSION) logger.info(start_message) if config.process_method.lower() == 'amazed': amazed(config) elif config.process_method.lower() == 'dummy': dummy(config) else: raise "Unknown process_method {}".format(config.process_method) return 0
def main_method(config): """main_method Parameters ---------- config : :obj:`Config` Configuration object Returns ------- int 0 on success """ # initialize logger logger = init_logger("pre_process", config.logdir, config.log_level) start_message = "Running pre_process {}".format(VERSION) logger.info(start_message) spectra_dir = normpath(config.workdir, config.spectra_dir) # bunch bunch_list = [] for i, spc_list in enumerate(bunch(config.bunch_size, spectra_dir)): spectralist_file = os.path.join(config.output_dir, 'spectralist_B{}.json'.format(str(i))) with open(spectralist_file, "w") as ff: json.dump(spc_list, ff) bunch_list.append(spectralist_file) # create json containing list of bunches with open(config.bunch_list, 'w') as f: json.dump(bunch_list, f) return 0
def main_method(config): """main_method Parameters ---------- config : :obj:`Config` Configuration object Returns ------- int 0 on success """ # initialize logger logger = init_logger("merge_results", config.logdir, config.log_level) start_message = "Running merge_results {}".format(VERSION) logger.info(start_message) if not os.path.exists(config.bunch_listfile): raise FileNotFoundError("Bunch list file not found : {}".format( config.bunch_listfile)) with open(config.bunch_listfile, "r") as ff: bunch_list = json.load(ff) data_dir = os.path.join(config.output_dir, 'data') os.makedirs(data_dir, exist_ok=True) for bunch in bunch_list: if not os.path.exists(bunch): raise FileNotFoundError( "Bunch directory not found : {}".format(bunch)) bunch_data_dir = os.path.join(bunch, "data") if not os.path.exists(bunch_data_dir): raise FileNotFoundError( "Bunch data directory not found : {}".format(bunch_data_dir)) to_merge = os.listdir(bunch_data_dir) for pfs_candidate in to_merge: shutil.move(os.path.join(bunch_data_dir, pfs_candidate), os.path.join(data_dir, pfs_candidate)) return 0
def main_method(config): """main_method Parameters ---------- config : :obj:`Config` Configuration object Returns ------- int 0 on success """ # initialize logger logger = init_logger("merge_results", config.logdir, config.log_level) start_message = "Running merge_results {}".format(VERSION) logger.info(start_message) if not os.path.exists(config.bunch_listfile): raise FileNotFoundError("Bunch list file not found : {}".format( config.bunch_listfile)) with open(config.bunch_listfile, "r") as ff: bunch_list = json.load(ff) data_dir = os.path.join(config.output_dir, 'data') os.makedirs(data_dir, exist_ok=True) galaxy_summary_list = [] stellar_summary_list = [] qso_summary_list = [] for bunch in bunch_list: if not os.path.exists(bunch): raise FileNotFoundError( "Bunch directory not found : {}".format(bunch)) bunch_data_dir = os.path.join(bunch, "data") if not os.path.exists(bunch_data_dir): raise FileNotFoundError( "Bunch data directory not found : {}".format(bunch_data_dir)) to_merge = os.listdir(bunch_data_dir) for pfs_candidate in to_merge: shutil.move(os.path.join(bunch_data_dir, pfs_candidate), os.path.join(data_dir, pfs_candidate)) try: amazed_results = RedshiftSummary(output_dir=bunch) amazed_results.read() galaxy_summary_list.extend(amazed_results.summary) except FileNotFoundError: raise FileNotFoundError( "Redshift summary file not found in {}".format(bunch)) try: amazed_results = StellarSummary(output_dir=bunch) amazed_results.read() stellar_summary_list.extend(amazed_results.summary) except: pass try: amazed_results = QsoSummary(output_dir=bunch) amazed_results.read() qso_summary_list.extend(amazed_results.summary) except: pass gsr = RedshiftSummary(output_dir=config.output_dir) gsr.summary = galaxy_summary_list gsr.write() ssr = StellarSummary(output_dir=config.output_dir) ssr.summary = stellar_summary_list ssr.write() qsr = QsoSummary(output_dir=config.output_dir) qsr.summary = qso_summary_list qsr.write() return 0
def main_method(config): """Run the 1D Data Reduction Pipeline. Returns ------- int 0 on success """ # initialize logger logger = init_logger('scheduler', config.logdir, config.log_level) start_message = "Running drp_1dpipe {}".format(VERSION) logger.info(start_message) # Launch banner print(start_message) # set workdir environment init_environ(config.workdir) runner_class = get_runner(config.scheduler) # if not runner_class: # error_message = "Unknown runner {}".format(config.scheduler) # logger.error(error_message) # raise error_message notifier = init_notifier(config.notification_url) json_bunch_list = normpath(config.output_dir, 'bunchlist.json') notifier.update('root', 'RUNNING') notifier.update('pre_process', 'RUNNING') with TemporaryFilesSet(keep_tempfiles=config.log_level <= logging.DEBUG) as tmpcontext: runner = runner_class(config, tmpcontext) # prepare workdir try: runner.single('pre_process', args={'workdir': normpath(config.workdir), 'logdir': normpath(config.logdir), 'bunch_size': config.bunch_size, 'spectra_dir': normpath(config.spectra_dir), 'bunch_list': json_bunch_list, 'output_dir': normpath(config.output_dir) }) except Exception as e: traceback.print_exc() notifier.update('pre_process', 'ERROR') return 1 else: notifier.update('pre_process', 'SUCCESS') # tmpcontext.add_files(json_bunch_list) # process spectra bunch_list, output_list, logdir_list = map_process_spectra_entries( json_bunch_list, config.output_dir, config.logdir) try: # runner.parallel('process_spectra', bunch_list, # 'spectra-listfile', ['output-dir','logdir'], runner.parallel('process_spectra', parallel_args={ 'spectra_listfile': bunch_list, 'output_dir': output_list, 'logdir': logdir_list }, args={ 'workdir': normpath(config.workdir), 'lineflux': config.lineflux, 'spectra_dir': normpath(config.spectra_dir), 'parameters_file': config.parameters_file, 'linemeas_parameters_file': config.linemeas_parameters_file, 'stellar': config.stellar }) except Exception as e: traceback.print_exc() notifier.update('root', 'ERROR') else: notifier.update('root', 'SUCCESS') json_reduce = normpath(config.output_dir, 'reduce.json') reduce_process_spectra_output(json_bunch_list, config.output_dir, json_reduce) try: runner.single('merge_results', args={ 'workdir': normpath(config.workdir), 'logdir': normpath(config.logdir), 'output_dir': normpath(config.output_dir), 'bunch_listfile': json_reduce }) except Exception as e: traceback.print_exc() notifier.update('merge_results', 'ERROR') return 1 else: notifier.update('merge_results', 'SUCCESS') aux_data_list = list_aux_data(json_bunch_list, config.output_dir) for aux_dir in aux_data_list: tmpcontext.add_dirs(aux_dir) return 0