def test_find_r0_subrun_trash(tmp_path):
    """find_r0_subrun must skip directories whose names do not look like dates."""
    from lstchain.onsite import find_r0_subrun

    r0_root = tmp_path / 'R0'
    trash_dir = r0_root / 'Trash'
    date_dir = r0_root / '20200218'

    # the same sub-run file lives in both places; only the dated one may win
    for directory in (trash_dir, date_dir):
        directory.mkdir(parents=True)
        shutil.copy2(test_subrun1, directory / test_subrun1.name)

    found = find_r0_subrun(2008, 0, r0_root)
    assert found.resolve().parent == date_dir
def test_find_r0_subrun(tmp_path):
    """find_r0_subrun must locate the requested run among several runs."""
    from lstchain.onsite import find_r0_subrun

    r0_root = tmp_path / 'R0'

    target_dir = r0_root / test_subrun1.parent.name
    target_dir.mkdir(parents=True)
    shutil.copy2(test_subrun1, target_dir / test_subrun1.name)

    # copy another run so we can make sure we really find the right one
    decoy_dir = r0_root / test_subrun2.parent.name
    decoy_dir.mkdir(parents=True)
    shutil.copy2(test_subrun2, decoy_dir / test_subrun2.name)

    found = find_r0_subrun(2008, 0, r0_root)
    assert found.resolve().parent == target_dir
def main():
    """Produce a charge (flat-field) calibration file for one run/sub-run.

    Resolves every auxiliary input (filters from the database or CLI, FF
    selection cuts, run summary, DRS4 pedestal, time calibration and the
    F-factor systematics correction), prepares the output directory tree,
    then invokes ``lstchain_create_calibration_file`` and produces the
    summary plots.

    Exits with an error message when the filter value cannot be determined,
    and with status 1 when the output file exists and may not be removed.
    Raises IOError when the config file does not exist.
    """
    args = parser.parse_args()
    run = args.run_number
    prod_id = args.prod_version
    stat_events = args.statistics
    time_run = args.time_run
    sys_date = args.sys_date
    no_sys_correction = args.no_sys_correction
    output_base_name = args.output_base_name
    sub_run = args.sub_run
    tel_id = args.tel_id
    config_file = args.config
    yes = args.yes
    pro_symlink = not args.no_pro_symlink

    # looks for the filter values in the database if not given
    if args.filters is None:
        filters = search_filter(run, args.mongodb)
    else:
        filters = args.filters

    if filters is None:
        sys.exit(f"Missing filter value for run {run}. \n")

    # define the FF selection cuts
    if args.min_ff is None or args.max_ff is None:
        min_ff, max_ff = define_FF_selection_range(filters)
    else:
        min_ff, max_ff = args.min_ff, args.max_ff

    print(f"\n--> Start calculating calibration from run {run}, filters {filters}")

    # verify config file
    if not config_file.exists():
        raise IOError(f"Config file {config_file} does not exists. \n")

    print(f"\n--> Config file {config_file}")

    # verify input file
    r0_dir = args.r0_dir or args.base_dir / 'R0'
    input_file = find_r0_subrun(run, sub_run, r0_dir)
    # the parent directory name encodes the observation date
    date = input_file.parent.name
    print(f"\n--> Input file: {input_file}")

    # verify output dir
    calib_dir = args.base_dir / LEVEL_A_PIXEL_DIR
    output_dir = calib_dir / "calibration" / date / prod_id
    if not output_dir.exists():
        print(f"--> Create directory {output_dir}")
        output_dir.mkdir(parents=True, exist_ok=True)

    # update the default production directory
    if pro_symlink:
        pro = "pro"
        create_pro_symlink(output_dir)
    else:
        pro = prod_id

    # make log dir
    log_dir = output_dir / "log"
    if not log_dir.exists():
        print(f"--> Create directory {log_dir}")
        log_dir.mkdir(parents=True, exist_ok=True)

    # search the summary file info
    run_summary_path = find_run_summary(date, args.base_dir)
    print(f"\n--> Use run summary {run_summary_path}")

    pedestal_file = find_pedestal_file(pro, args.pedestal_run, date=date, base_dir=args.base_dir)
    print(f"\n--> Pedestal file: {pedestal_file}")

    # search for time calibration file
    time_file = find_time_calibration_file(pro, run, time_run, args.base_dir)
    print(f"\n--> Time calibration file: {time_file}")

    # define systematic correction file
    if no_sys_correction:
        systematics_file = None
    else:
        systematics_file = find_systematics_correction_file(pro, date, sys_date, args.base_dir)

    print(f"\n--> F-factor systematics correction file: {systematics_file}")

    # define charge file names
    print("\n***** PRODUCE CHARGE CALIBRATION FILE ***** ")

    if filters is not None:
        filter_info = f"_filters_{filters}"
    else:
        filter_info = ""

    # remember there are no systematic corrections
    prefix = "no_sys_corrected_" if no_sys_correction else ""

    output_name = f"{prefix}{output_base_name}{filter_info}.Run{run:05d}.{sub_run:04d}"

    output_file = output_dir / f'{output_name}.h5'
    print(f"\n--> Output file {output_file}")

    log_file = log_dir / f"{output_name}.log"
    print(f"\n--> Log file {log_file}")

    if output_file.exists():
        remove = False

        # ask only in interactive sessions; inside a slurm job rely on --yes
        if not yes and os.getenv('SLURM_JOB_ID') is None:
            remove = query_yes_no(">>> Output file exists already. Do you want to remove it?")

        if yes or remove:
            os.remove(output_file)
            # BUGFIX: the log file is not guaranteed to exist (e.g. a previous
            # run was interrupted) — removing it unconditionally raised
            # FileNotFoundError and aborted the whole recalibration
            if log_file.exists():
                os.remove(log_file)
        else:
            print("\n--> Output file exists already. Stop")
            sys.exit(1)

    #
    # produce ff calibration file
    #
    cmd = [
        "lstchain_create_calibration_file",
        f"--input_file={input_file}",
        f"--output_file={output_file}",
        "--LSTEventSource.default_trigger_type=tib",
        f"--EventSource.min_flatfield_adc={min_ff}",
        f"--EventSource.max_flatfield_adc={max_ff}",
        f"--LSTCalibrationCalculator.systematic_correction_path={systematics_file}",
        f"--LSTEventSource.EventTimeCalculator.run_summary_path={run_summary_path}",
        f"--LSTEventSource.LSTR0Corrections.drs4_time_calibration_path={time_file}",
        f"--LSTEventSource.LSTR0Corrections.drs4_pedestal_path={pedestal_file}",
        f"--LSTEventSource.use_flatfield_heuristic={args.use_flatfield_heuristic}",
        f"--FlatFieldCalculator.sample_size={stat_events}",
        f"--PedestalCalculator.sample_size={stat_events}",
        f"--config={config_file}",
        f"--log-file={log_file}",
        "--log-file-level=DEBUG",
    ]

    print("\n--> RUNNING...")
    subprocess.run(cmd, check=True)

    # plot and save some results
    plot_file = f"{output_dir}/log/{output_name}.pdf"

    print(f"\n--> PRODUCING PLOTS in {plot_file} ...")
    calib.read_file(output_file, tel_id)
    calib.plot_all(calib.ped_data, calib.ff_data, calib.calib_data, run, plot_file)

    print("\n--> END")
def main():
    """Produce the DRS4 time-sampling calibration file for a given run.

    Locates the R0 input sub-run, prepares the output directory tree and
    auxiliary inputs (run summary, pedestal file), then invokes
    ``lstchain_data_create_time_calibration_file``.
    """
    args = parser.parse_args()
    run = args.run_number
    prod_id = args.prod_version
    n_stat_events = args.statistics
    base_dir = args.base_dir
    sub_run = args.sub_run
    cfg_file = args.config
    use_pro_symlink = not args.no_pro_symlink

    print(f"\n--> Start calculating drs4 time corrections from run {run}")

    # verify config file
    if not cfg_file.exists():
        raise IOError(f"Config file {cfg_file} does not exists. \n")
    print(f"\n--> Config file {cfg_file}")

    # verify input file
    r0_dir = args.r0_dir or Path(args.base_dir) / 'R0'
    input_file = find_r0_subrun(run, sub_run, r0_dir)
    date = input_file.parent.name
    print(f"\n--> Input file: {input_file}")

    # verify output dir
    output_dir = base_dir / LEVEL_A_PIXEL_DIR / "drs4_time_sampling_from_FF" / date / prod_id
    if not output_dir.exists():
        print(f"--> Create directory {output_dir}")
        output_dir.mkdir(parents=True, exist_ok=True)

    # update the default production directory
    pro = "pro" if use_pro_symlink else prod_id
    if use_pro_symlink:
        create_pro_symlink(output_dir)

    summary_file = find_run_summary(date, args.base_dir)
    print(f"\n--> Use run summary {summary_file}")

    drs4_pedestal = find_pedestal_file(pro, args.pedestal_run, date=date, base_dir=args.base_dir)
    print(f"\n--> Pedestal file: {drs4_pedestal}")

    time_calib_file = output_dir / f"time_calibration.Run{run:05d}.0000.h5"
    print(f"\n--> PRODUCING TIME CALIBRATION in {time_calib_file} ...")

    cmd = [
        "lstchain_data_create_time_calibration_file",
        f"--input-file={input_file}",
        f"--output-file={time_calib_file}",
        f"--config={cfg_file}",
        f"--run-summary-path={summary_file}",
        f"--pedestal-file={drs4_pedestal}",
        f"--max-events={n_stat_events}",
    ]
    if args.no_progress:
        cmd.append("--no-progress")

    print("\n--> RUNNING...")
    subprocess.run(cmd, check=True)
    print("\n--> END")
def main():
    """Submit one slurm job per (run, sub-run) pair to create calibration files.

    Writes an sbatch job script into each run's log directory and submits it
    with ``sbatch``. Requires a slurm batch system (checked via ``srun``).
    """
    args = parser.parse_args()
    run_list = args.run_list
    filters_list = args.filters_list
    ped_run = args.pedestal_run
    prod_id = args.prod_version
    stat_events = args.statistics
    base_dir = args.base_dir
    time_run = args.time_run
    sub_run_list = args.sub_run_list
    config_file = args.config
    sys_date = args.sys_date
    no_sys_correction = args.no_sys_correction
    yes = args.yes
    output_base_name = args.output_base_name

    calib_dir = base_dir / LEVEL_A_PIXEL_DIR

    if shutil.which('srun') is None:
        sys.exit(">>> This script needs a slurm batch system. Stop")

    print(f"\n--> Start reconstruct runs {run_list} and sub-runs {sub_run_list}")

    # verify config file
    if not config_file.exists():
        sys.exit(f"Config file {config_file} does not exists. \n")
    print(f"\n--> Config file {config_file}")

    # for old runs or if the data-base is not available
    # it is possible to give the filter list
    if filters_list is not None and len(filters_list) != len(run_list):
        sys.exit("Filter list length must be equal to run list length. Verify \n")

    r0_dir = args.r0_dir or Path(args.base_dir) / 'R0'

    # loops over runs and sub_runs and send jobs
    filters = None
    for run_index, run in enumerate(run_list):
        if filters_list is not None:
            filters = filters_list[run_index]

        for sub_run in sub_run_list:
            print(f"\n--> Run {run} and sub-run {sub_run}")
            input_file = find_r0_subrun(run, sub_run, r0_dir)
            print(f"--> Input file: {input_file}")
            date = input_file.parent.name

            # verify output dir
            output_dir = calib_dir / "calibration" / date / prod_id
            if not output_dir.exists():
                print(f"--> Create directory {output_dir}")
                output_dir.mkdir(parents=True, exist_ok=True)

            # verify log dir
            log_dir = output_dir / "log"
            if not log_dir.exists():
                print(f"--> Create directory {log_dir}\n")
                log_dir.mkdir(parents=True, exist_ok=True)

            # job file
            now = datetime.now().replace(microsecond=0).isoformat(sep='T')
            job_file = log_dir / f"run_{run}_subrun_{sub_run}_date_{now}.job"

            with job_file.open(mode="w") as fh:
                # sbatch header: paths in --output/--error are relative to -D
                fh.writelines([
                    "#!/bin/bash\n",
                    f"#SBATCH --job-name={run}.job\n",
                    f"#SBATCH --output=log/run_{run}_subrun_{sub_run}_date_{now}.out\n",
                    f"#SBATCH --error=log/run_{run}_subrun_{sub_run}_date_{now}.err\n",
                    "#SBATCH -p short\n",
                    "#SBATCH --cpus-per-task=1\n",
                    "#SBATCH --mem-per-cpu=10G\n",
                    f"#SBATCH -D {output_dir} \n",
                ])

                cmd = [
                    "srun",
                    "onsite_create_calibration_file",
                    f"-r {run}",
                    f"-v {prod_id}",
                    f"--sub_run={sub_run}",
                    f"-b {base_dir}",
                    f"-s {stat_events}",
                    f"--output_base_name={output_base_name}",
                    f"--config={config_file}",
                ]

                if ped_run is not None:
                    cmd.append(f"--pedestal_run={ped_run}")
                if time_run is not None:
                    cmd.append(f"--time_run={time_run}")
                if filters is not None:
                    cmd.append(f"--filters={filters}")
                if sys_date is not None:
                    cmd.append(f"--sys_date={sys_date}")
                if yes:
                    cmd.append("--yes")
                if no_sys_correction:
                    cmd.append("--no_sys_correction")

                # join command together with newline, line continuation and indentation
                fh.write(" \\\n ".join(cmd))
                fh.write('\n')

            subprocess.run(["sbatch", job_file], check=True)
def main():
    """Create a DRS4 baseline pedestal file from sub-run 0 of a given run.

    Finds the R0 input file, prepares the output directory tree, runs
    ``lstchain_create_drs4_pedestal_file`` and produces the check plots.

    Exits with status 1 when the output file exists and may not be removed.
    """
    args = parser.parse_args()
    run = args.run_number
    prod_id = args.prod_version
    max_events = args.max_events
    base_dir = args.base_dir
    tel_id = args.tel_id
    yes = args.yes
    pro_symlink = not args.no_pro_symlink

    print(f"\n--> Start calculating DRS4 pedestals from run {run}\n")

    # verify input file
    r0_dir = args.r0_dir or Path(args.base_dir) / 'R0'
    input_file = find_r0_subrun(run, sub_run=0, r0_dir=r0_dir)
    # the parent directory name encodes the observation date
    date = input_file.parent.name

    # verify and make output dir
    calib_dir = base_dir / LEVEL_A_PIXEL_DIR
    output_dir = calib_dir / "drs4_baseline" / date / prod_id
    if not output_dir.exists():
        print(f"--> Create directory {output_dir}")
        output_dir.mkdir(parents=True, exist_ok=True)

    # update the default production directory
    if pro_symlink:
        create_pro_symlink(output_dir)

    # make log dir
    log_dir = output_dir / "log"
    if not log_dir.exists():
        print(f"--> Create directory {log_dir}")
        # CONSISTENCY: use Path.mkdir like every other directory creation in
        # these scripts instead of os.makedirs
        log_dir.mkdir(parents=True, exist_ok=True)

    # define output file
    output_file = output_dir / f"drs4_pedestal.Run{run:05d}.0000.h5"

    if output_file.exists():
        remove = False

        # ask only in interactive sessions; inside a slurm job rely on --yes
        if not yes and os.getenv('SLURM_JOB_ID') is None:
            remove = query_yes_no(
                ">>> Output file exists already. Do you want to remove it?")

        if yes or remove:
            os.remove(output_file)
        else:
            print("\n--> Output file exists already. Stop")
            # raise SystemExit instead of the interactive-only exit() builtin
            raise SystemExit(1)

    # run lstchain script
    cmd = [
        "lstchain_create_drs4_pedestal_file",
        f"--input={input_file}",
        f"--output={output_file}",
        f"--max-events={max_events}",
    ]
    if args.no_progress:
        cmd.append("--no-progress")

    subprocess.run(cmd, check=True)

    # plot and save some results
    plot_file = f"{output_dir}/log/drs4_pedestal.Run{run:05d}.0000.pdf"

    print(f"\n--> PRODUCING PLOTS in {plot_file} ...")
    drs4.plot_pedestals(input_file, output_file, run, plot_file,
                        tel_id=tel_id, offset_value=400)

    print("\n--> END")