Example #1
 def _build_param_dir(cls, instrument_name: str):
     path = os.path.join(p.param_dir, "instruments")
     u.mkdir_check(path)
     u.debug_print(2, "Instrument._build_param_dir(): instrument_name ==", instrument_name)
     path = os.path.join(path, instrument_name)
     u.mkdir_check(path)
     return path
def main(data_title, sextractor_path, origin, destination):
    properties = p.object_params_imacs(data_title)
    data_dir = properties['data_dir']
    if sextractor_path is not None:
        if not os.path.isdir(sextractor_path):
            os.mkdir(sextractor_path)
        do_sextractor = True
        ap_diams_sex = p.load_params(f'param/aperture_diameters_fors2')
    else:
        do_sextractor = False

    origin_path = data_dir + origin
    destination_path = data_dir + destination
    u.mkdir_check(destination_path)
    filters = next(os.walk(origin_path))[1]

    for fil in filters:
        u.mkdir_check(destination_path + fil)
        if do_sextractor:
            if not os.path.isdir(sextractor_path + fil):
                os.mkdir(sextractor_path + fil)
        files = os.listdir(origin_path + fil + "/")
        for file_name in files:
            if file_name[-5:] == '.fits':
                science_origin = origin_path + fil + "/" + file_name
                science_destination = destination_path + fil + "/" + file_name
                print(science_origin)
                # Divide by exposure time to get an image in counts/second.
                f.divide_by_exp_time(file=science_origin,
                                     output=science_destination)
                if do_sextractor:
                    copyfile(science_origin,
                             sextractor_path + fil + "/" + file_name)

        # Write a sextractor file for photometric testing of the data from the upper chip.
        if do_sextractor:
            # Write a csv table of file properties to each filter directory.
            tbl = f.fits_table(input_path=sextractor_path + fil,
                               output_path=sextractor_path + fil + "/" + fil +
                               "_fits_tbl.csv",
                               science_only=False)
            # TODO: Rewrite to use psf-fitting (in FORS2 pipeline as well)
            for i, d in enumerate(ap_diams_sex):
                f.write_sextractor_script(table=tbl,
                                          output_path=sextractor_path + fil +
                                          "/sextract_aperture_" + str(d) +
                                          ".sh",
                                          sex_params=['c', 'PHOT_APERTURES'],
                                          sex_param_values=['im.sex',
                                                            str(d)],
                                          cat_name='sextracted_' + str(d),
                                          cats_dir='aperture_' + str(d),
                                          criterion='chip',
                                          value='CHIP1')

    if os.path.isfile(origin_path + data_title + '.log'):
        copyfile(origin_path + data_title + '.log',
                 destination_path + data_title + ".log")
    u.write_log(path=destination_path + data_title + ".log",
                action='Divided by exposure time.')
Example #3
def check_for_config():
    u.mkdir_check(config_dir)
    p = load_params(config_file)
    if p is None:
        config_text = pkg_resources.resource_string(
            __name__, os.path.join("..", f"param",
                                   "config_template.yaml")).decode()
        print(type(config_text))
        config_text = config_text.replace(
            "proj_dir: <some_directory>/craft-optical-followup/",
            f"proj_dir: {os.getcwd()}/")

        with open(config_file, "w") as cfg:
            cfg.write(config_text)

        print(f"No config file was detected at {config_file}.")
        print(f"A fresh config file has been created at '{config_file}'")
        print(
            "In this file, please set 'top_data_dir' to a valid path in which to store all "
            "data products of this package (this may require a large amount of space)."
        )
        print("You may also like to specify an alternate param_dir.")

        input("\nOnce you have edited this file, press Enter to proceed.")
        p = load_params(config_file)
    else:
        for param in p:
            p[param] = u.check_trailing_slash(p[param])
        save_params(config_file, p)
        yaml_to_json(config_file)
    return p
def main(comb_path, output_dir, obj, sextractor_path, path_suffix):
    print("\nExecuting Python script pipeline_fors2/7-trim_combined.py, with:")
    print(f"\tepoch {obj}")
    print(f"\toutput directory {output_dir}")
    print(f"\tsextractor directory {sextractor_path}")
    print()

    if sextractor_path is not None:
        u.mkdir_check_nested(sextractor_path)
        do_sextractor = True
    else:
        do_sextractor = False

    # Build a list of the filter prefixes used.
    fils = []
    files = list(filter(lambda x: x[-4:] == '.tbl', os.listdir(comb_path)))
    for file in files:
        if file[0] not in fils:
            fils.append(str(file[0]))

    for fil in fils:
        if do_sextractor:
            u.mkdir_check(sextractor_path)
        area_file = fil + "_coadded_area.fits"
        comb_file = fil + "_coadded.fits"

        left, right, bottom, top = f.detect_edges_area(comb_path + area_file)
        # Trim a little extra to be safe.
        left = left + 5
        right = right - 5
        top = top - 5
        bottom = bottom + 5

        f.trim_file(comb_path + comb_file,
                    left=left,
                    right=right,
                    top=top,
                    bottom=bottom,
                    new_path=output_dir + "/" + comb_file)
        # Keep a trimmed version of the area file, it comes in handy later.
        f.trim_file(comb_path + area_file,
                    left=left,
                    right=right,
                    top=top,
                    bottom=bottom,
                    new_path=output_dir + "/" + area_file)
        if do_sextractor:
            copyfile(output_dir + "/" + comb_file, sextractor_path + comb_file)

        if path_suffix is None:
            path_suffix = output_dir.split("/")[-2]

        path_suffix = u.remove_trailing_slash(path_suffix)

        p.add_output_path(obj=obj,
                          instrument='fors2',
                          key=fil[0] + '_trimmed_image' + path_suffix,
                          path=output_dir + "/" + comb_file)
Example #5
 def check_data_path(self):
     if self.field is not None:
         u.debug_print(2, "", self.name)
         # print(self.field.data_path, self.name_filesys)
         self.data_path = os.path.join(self.field.data_path, "objects", self.name_filesys)
         u.mkdir_check(self.data_path)
         self.output_file = os.path.join(self.data_path, f"{self.name_filesys}_outputs.yaml")
         return True
     else:
         return False
def main(data_title: str, origin: str, destination: str, redo: bool = False):
    properties = p.object_params_imacs(data_title)
    path = properties['data_dir']

    origin_path = path + origin
    astrometry_path = path + destination
    u.mkdir_check(astrometry_path)

    keys = params.load_params('param/keys')
    key = keys['astrometry']

    reduced_list = os.listdir(origin_path)
    astrometry_list = os.listdir(astrometry_path)

    if redo:
        to_send = list(filter(lambda f: f[-5:] == '.fits', reduced_list))
    else:
        to_send = list(
            filter(lambda f: f[-5:] == '.fits' and f not in astrometry_list,
                   reduced_list))

    filters = list(filter(lambda d: os.path.isdir(origin_path + d), os.listdir(origin_path)))

    for f in filters:
        reduced_path_filter = origin_path + f + '/'
        astrometry_path_filter = astrometry_path + f + '/'
        print(f'To send to Astrometry.net from {f}:')
        for file in to_send:
            print('\t' + file)

        for file in to_send:
            hdu = fits.open(origin_path + file)
            header = hdu[0].header
            ra = header['RA-D']
            dec = header['DEC-D']
            scale_upper = header['SCALE'] + 0.1
            scale_lower = header['SCALE'] - 0.1
            hdu.close()
            print('Sending to Astrometry.net:', file)
            os.system(f'python /astrometry-client.py '
                      f'--apikey {key} '
                      f'-u {reduced_path_filter}{file} '
                      f'-w '
                      f'--newfits {astrometry_path_filter}{file} '
                      f'--ra {ra} --dec {dec} --radius {1.} '
                      f'--scale-upper {scale_upper} '
                      f'--scale-lower {scale_lower} '
                      f'--private --no_commercial')

    if os.path.isfile(origin_path + data_title + '.log'):
        shutil.copy(origin_path + data_title + '.log',
                    astrometry_path + data_title + ".log")
    u.write_log(path=astrometry_path + data_title + ".log",
                action=f'Astrometry solved using 3-astrometry.py')
def main(ob, path):

    print("\nExecuting Python script pipeline_fors2/9-esorex_zeropoint_prep.py, with:")
    print(f"\tepoch {ob}")
    print(f"\tcalibration data path {path}")
    print()

    output = object_output_params(obj=ob, instrument='FORS2')
    filters = output['filters']
    for f in filters:
        mkdir_check(path + '/' + f)
Example #8
def main(field, destination, epoch, force_subtract_better_seeing, subtraction_type, template_instrument):
    if destination[-1] != '/':
        destination = destination + '/'

    params = p.object_params_frb(field)
    destination_path = f'{params["data_dir"]}subtraction/{destination}/'
    u.mkdir_check(destination_path)

    template_epoch = params['template_epoch_' + template_instrument.lower()]

    comparison_title = f'{field}_{epoch}'
    template_title = f'{field}_{template_epoch}'

    filters = params['filters']

    for f in filters:
        f_0 = f[0]

        destination_path_filter = f'{destination_path}{f}/'
        u.mkdir_check(destination_path_filter)

        template_params = p.load_params(destination_path_filter + f'{template_title}_template_output_values.yaml')
        comparison_params = p.load_params(destination_path_filter + f'{comparison_title}_comparison_output_values.yaml')

        if f_0 + '_fwhm_arcsec' in template_params:
            fwhm_template = template_params[f_0 + '_fwhm_arcsec']
        elif f_0.lower() + '_fwhm_arcsec' in template_params:
            fwhm_template = template_params[f_0.lower() + '_fwhm_arcsec']
        else:
            raise ValueError(f_0 + '_fwhm_arcsec or ' + f_0.lower() + '_fwhm_arcsec not found for template image.')

        if f_0 + '_fwhm_arcsec' in comparison_params:
            fwhm_comparison = comparison_params[f_0 + '_fwhm_arcsec']
        elif f_0.lower() + '_fwhm_arcsec' in comparison_params:
            fwhm_comparison = comparison_params[f_0.lower() + '_fwhm_arcsec']
        else:
            raise ValueError(f_0 + '_fwhm_arcsec or ' + f_0.lower() + '_fwhm_arcsec not found for comparison image.')

        template = destination_path_filter + f'{template_title}_template_aligned.fits'
        comparison = destination_path_filter + f'{comparison_title}_comparison_aligned.fits'

        _, difference = ph.subtract(template_origin=template,
                                    comparison_origin=comparison,
                                    output=destination_path_filter,
                                    template_fwhm=fwhm_template,
                                    comparison_fwhm=fwhm_comparison,
                                    force_subtract_better_seeing=force_subtract_better_seeing,
                                    comparison_title=comparison_title,
                                    template_title=template_title,
                                    field=field, comparison_epoch=epoch, template_epoch=template_epoch)
Example #9
def main(data_title: str):
    properties = p.object_params_sdss(data_title)
    path = properties['data_dir']

    data_path = path + '/0-data/'

    u.mkdir_check(data_path)
    os.listdir(path)

    print(path)

    for file in filter(lambda fil: fil[-5:] == '.fits', os.listdir(path)):
        f = file[-6]
        shutil.copy(path + file, data_path + str(f) + '_cutout.fits')
Example #10
def main(data_title: str):
    properties = p.object_params_des(data_title)
    path = properties['data_dir']

    data_path = path + '/0-data/'

    u.mkdir_check(data_path)
    os.listdir(path)

    print(path)

    for file in filter(lambda fil: fil[-5:] == '.fits', os.listdir(path)):
        f = file[-6]
        shutil.copy(path + file, data_path + str(f) + '_cutout.fits')

        p.add_output_path(obj=data_title, instrument='DES', key=f + '_subtraction_image', path=data_path + str(f) + '_cutout.fits')
Example #11
def test_mkdir_check():
    path = os.path.join(test_file_path, "path_test")
    u.rm_check(path)
    u.mkdir_check(path)
    assert os.path.isdir(path)
    u.rmtree_check(path)

    paths = [
        os.path.join(test_file_path, "test_path_1"),
        os.path.join(test_file_path, "test_path_2"),
        os.path.join(test_file_path, "test_path_3")
    ]
    u.mkdir_check_args(*paths)
    for path in paths:
        assert os.path.isdir(path)
        u.rmtree_check(path)
def source_extractor(
        image_path: str,
        output_dir: str = None,
        configuration_file: str = None,
        parameters_file: str = None,
        catalog_name: str = None,
        copy_params: bool = True,
        template_image_path: str = None,
        **configs):
    """
    :param configs: Any source-extractor (sextractor) parameter, normally read via the config file but that can be
    overridden by passing to the shell command, can be given here.
    """

    if "gain" in configs:
        configs["gain"] = u.check_quantity(number=configs["gain"], unit=gain_unit).to(gain_unit).value

    old_dir = os.getcwd()
    if output_dir is None:
        output_dir = os.getcwd()
    else:
        u.mkdir_check(output_dir)
        os.chdir(output_dir)

    if copy_params:
        os.system(f"cp {os.path.join(p.path_to_config_psfex(), '*')} .")

    sys_str = "source-extractor "
    if template_image_path is not None:
        sys_str += f"{template_image_path},"
    sys_str += image_path + " "
    if configuration_file is not None:
        sys_str += f" -c {configuration_file}"
    if catalog_name is None:
        image_name = os.path.split(image_path)[-1]
        catalog_name = f"{image_name}.cat"
    sys_str += f" -CATALOG_NAME {catalog_name}"
    if parameters_file is not None:
        sys_str += f" -PARAMETERS_NAME {parameters_file}"
    for param in configs:
        sys_str += f" -{param.upper()} {configs[param]}"
    u.system_command_verbose(sys_str, error_on_exit_code=True)
    catalog_path = os.path.join(output_dir, catalog_name)
    os.chdir(old_dir)
    return catalog_path
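# A minimal usage sketch for source_extractor() above (not part of the original pipeline).
# The image and configuration paths are hypothetical; any extra keyword argument is
# forwarded to the source-extractor command line as "-<PARAM> <value>".
catalog_path = source_extractor(
    image_path="/data/epoch_1/g_coadded.fits",   # hypothetical input image
    output_dir="/data/epoch_1/sextractor/",      # created if it doesn't exist
    configuration_file="psf-fit.sex",            # passed as -c
    parameters_file="psf-fit.param",             # passed as -PARAMETERS_NAME
    catalog_name="g_coadded.cat",
    DETECT_THRESH=1.5,                           # forwarded as -DETECT_THRESH 1.5
)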
def main(obj, test, magnitude):
    print('Got here.')
    properties = p.object_params_fors2(obj)

    synth_path = properties['data_dir'] + 'synthetic/'
    u.mkdir_check(synth_path)
    synth_path = synth_path + 'frb_position/'
    u.mkdir_check(synth_path)

    now = time.Time.now()
    now.format = 'isot'
    test = str(now) + '_' + test
    test_path = synth_path + test + '/'

    print(properties['filters'])

    ph.insert_synthetic_at_frb(obj=obj,
                               test_path=test_path,
                               filters=properties['filters'],
                               magnitudes=[magnitude, magnitude],
                               add_path=True)
Example #14
def generate_astrometry_indices(cat_name: str,
                                cat: Union[str, table.Table],
                                output_file_prefix: str,
                                unique_id_prefix: int,
                                index_output_dir: str,
                                fits_cat_output: str = None,
                                p_lower: int = 0,
                                p_upper: int = 2):
    u.mkdir_check(index_output_dir)
    cat_name = cat_name.lower()
    if fits_cat_output is None and isinstance(cat, str):
        if cat.endswith(".csv"):
            fits_cat_output = cat.replace(".csv", ".fits")
        else:
            fits_cat_output = cat + ".fits"
    elif fits_cat_output is None:
        raise ValueError(
            "fits_cat_output must be provided if cat is given as a Table instead of a path."
        )
    cat = u.path_or_table(cat, fmt="ascii.csv")
    cols = cat_columns(cat=cat_name, f="rank")
    cat.write(fits_cat_output, format='fits', overwrite=True)
    unique_id_prefix = str(unique_id_prefix)
    for scale in range(p_lower, p_upper + 1):
        unique_id = unique_id_prefix + str(scale).replace("-", "0")
        unique_id = int(unique_id)
        output_file_name_scale = f"{output_file_prefix}_{scale}"
        try:
            astrometry_net.build_astrometry_index(
                input_fits_catalog=fits_cat_output,
                unique_id=unique_id,
                output_index=os.path.join(index_output_dir,
                                          output_file_name_scale),
                scale_number=scale,
                sort_column=cols["mag_auto"],
                scan_through_catalog=True)
        except SystemError:
            print(f"Building index for scale {scale} failed.")
Example #15
def main(data_title, sextractor_path, origin, destination):
    properties = p.object_params_xshooter(data_title)
    data_dir = properties['data_dir']
    if sextractor_path is not None:
        if not os.path.isdir(sextractor_path):
            os.mkdir(sextractor_path)
        do_sextractor = True
    else:
        do_sextractor = False

    origin_path = data_dir + origin
    destination_path = data_dir + destination
    u.mkdir_check(destination_path)
    filters = next(os.walk(origin_path))[1]

    for fil in filters:
        u.mkdir_check(destination_path + fil)
        if do_sextractor:
            if not os.path.isdir(sextractor_path + fil):
                os.mkdir(sextractor_path + fil)
        files = os.listdir(origin_path + fil + "/")
        for file_name in files:
            if file_name[-5:] == '.fits':
                science_origin = origin_path + fil + "/" + file_name
                science_destination = destination_path + fil + "/" + file_name
                print(science_origin)
                # Divide by exposure time to get an image in counts/second.
                f.divide_by_exp_time(file=science_origin,
                                     output=science_destination)
                if do_sextractor:
                    copyfile(science_origin,
                             sextractor_path + fil + "/" + file_name)

    if os.path.isfile(origin_path + data_title + '.log'):
        copyfile(origin_path + data_title + '.log',
                 destination_path + data_title + ".log")
    u.write_log(path=destination_path + data_title + ".log",
                action=f'Divided by exposure time.')
Example #16
def sort_by_filter(path: 'str'):
    if path[-1] != "/":
        path = path + "/"
    files = os.listdir(path)
    filters = {}
    # Gather the information we need.
    for file in files:
        if file[-5:] == ".fits":
            filter = get_filter(path + file)
            if type(filter) is list:
                filter = filter[0]
            if filter not in filters:
                filters[filter] = []
            filters[filter].append(file)
    # Do sort
    for filter in filters:
        # Create a directory for each filter
        filter_path = path + filter + "/"
        u.mkdir_check(filter_path)
        # Move all the files with that filter
        for file in filters[filter]:
            if os.path.isfile(filter_path + file):
                os.remove(filter_path + file)
            sh.move(path + file, filter_path)
def main(obj, test, curves_path):
    properties = p.object_params_fors2(obj)

    if curves_path[-1] != '/':
        curves_path += '/'

    mag_table_file = next(filter(lambda file: file[-4:] == '.csv',
                                 os.listdir(curves_path)))
    mag_table = table.Table.read(curves_path + mag_table_file)

    filters = mag_table.colnames.copy()
    filters.remove('model')

    synth_path = properties['data_dir'] + 'synthetic/'
    u.mkdir_check(synth_path)
    synth_path = synth_path + 'frb_position/'
    u.mkdir_check(synth_path)

    now = time.Time.now()
    now.format = 'isot'
    synth_path += f'sn_models_{now}/'
    u.mkdir_check(synth_path)

    for row in mag_table:
        model = row['model']
        magnitudes = []
        for f in filters:
            magnitudes.append(row[f])
        test_spec = test + '_' + model
        test_path = synth_path + test_spec + '/'
        ph.insert_synthetic_at_frb(obj=obj,
                                   test_path=test_path,
                                   filters=filters,
                                   magnitudes=magnitudes,
                                   add_path=False)

    for f in filters:
        p.add_output_path(obj=obj,
                          key=f[0] + '_subtraction_image_synth_frb_sn_models',
                          path=synth_path)
Example #18
def main(data_title, origin, destination):
    properties = p.object_params_xshooter(data_title)
    path = properties['data_dir']

    destination = path + destination
    u.mkdir_check(destination)
    origin = path + origin
    u.mkdir_check(origin)

    dirs = next(os.walk(origin))[1]

    left = 27
    right = 537
    top = 526
    bottom = 15

    for fil in dirs:
        u.mkdir_check(destination + fil)
        print('HERE:')
        print(origin + fil)
        files = os.listdir(origin + fil)
        files.sort()

        for i, file in enumerate(files):
            # Build the output path for the trimmed image.
            new_path = destination + fil + "/" + file.replace(
                ".fits", "_trim.fits")
            path = origin + fil + "/" + file
            # Trim the detector edges off the image.
            f.trim_file(path,
                        left=left,
                        right=right,
                        top=top,
                        bottom=bottom,
                        new_path=new_path)

    copyfile(origin + data_title + ".log", destination + data_title + ".log")
    u.write_log(path=destination + data_title + ".log",
                action='Edges trimmed using 4-trim.py\n')
def main(obj, test, mag_min, mag_max, increment, instrument):
    properties = p.object_params_instrument(obj, instrument=instrument)
    output = p.object_output_params(obj=obj, instrument=instrument)
    paths = p.object_output_paths(obj=obj, instrument=instrument)

    synth_path = properties['data_dir'] + 'synthetic/'
    u.mkdir_check(synth_path)
    synth_path = synth_path + 'frb_position/'
    u.mkdir_check(synth_path)

    now = time.Time.now()
    now.format = 'isot'
    synth_path += f'range_{now}/'
    u.mkdir_check(synth_path)

    filters = output['filters']

    for magnitude in np.arange(mag_min, mag_max, increment):
        magnitudes = []
        for i in range(len(filters)):
            magnitudes.append(magnitude)
        test_spec = test + '_' + str(u.round_to_sig_fig(magnitude, 4))
        test_path = synth_path + test_spec + '/'
        ph.insert_synthetic_at_frb(obj=properties,
                                   test_path=test_path,
                                   filters=filters,
                                   magnitudes=magnitudes,
                                   add_path=False,
                                   psf=True,
                                   output_properties=output,
                                   instrument=instrument,
                                   paths=paths)

    p.add_output_path(obj=obj,
                      key='subtraction_image_synth_frb_range',
                      path=synth_path,
                      instrument=instrument)
Example #20
def standard_script(
        input_directory: str,
        output_directory: str,
        output_file_name: str = None,
        ignore_differences: bool = False,
        coadd_types: Union[List[str], str] = 'median',
        # unit="electron / second",
        do_inject_header: bool = True,
        **kwargs):
    """
    Does a standard median coaddition of fits files in input_directory.
    Adapted from an example bash script found at http://montage.ipac.caltech.edu/docs/first_mosaic_tutorial.html
    :param input_directory: path to directory containing input images.
    :param output_directory: path to directory to write data products to; will be created if it doesn't exist.
    :param output_file_name: Name of final coadded image file.
    :param ignore_differences: If False, checks if input images have compatible exposure times, filter, etc. and raises ValueError if not.
    :return:
    """
    u.mkdir_check(output_directory)
    old_dir = os.getcwd()

    proj_dir = os.path.join(output_directory, "projdir")
    diff_dir = os.path.join(output_directory, "diffdir")
    corr_dir = os.path.join(output_directory, "corrdir")
    u.mkdir_check(proj_dir, diff_dir, corr_dir)

    os.chdir(output_directory)
    print("Creating directories to hold processed images.")

    proj_dir = "projdir"
    diff_dir = "diffdir"
    corr_dir = "corrdir"

    if not ignore_differences:
        print("Checking input images...")
        check_input_images(input_directory=input_directory, **kwargs)

    print("Creating metadata tables of the input images.")
    table_path = "images.tbl"
    image_table(input_directory=input_directory, output_path=table_path)

    print("Creating FITS headers describing the footprint of the mosaic.")
    header_path = "template.hdr"
    make_header(table_path=table_path, output_path=header_path)

    print("Reprojecting input images.")
    stats_table_path = "stats.tbl"
    project_execute(input_directory=input_directory,
                    table_path=table_path,
                    header_path=header_path,
                    proj_dir=proj_dir,
                    stats_table_path=stats_table_path)

    print("Creating metadata table of the reprojected images.")
    reprojected_table_path = "proj.tbl"
    image_table(input_directory=proj_dir, output_path=reprojected_table_path)

    print("Analyzing the overlaps between images.")
    difference_table_path = "diffs.tbl"
    fit_table_path = "fits.tbl"
    overlaps(table_path=reprojected_table_path,
             difference_table_path=difference_table_path)
    difference_execute(input_directory=proj_dir,
                       difference_table_path=difference_table_path,
                       header_path=header_path,
                       diff_dir=diff_dir)
    fit_execute(difference_table_path=difference_table_path,
                fit_table_path=fit_table_path,
                diff_dir=diff_dir)

    print(
        "Performing background modeling and computing corrections for each image."
    )
    corrections_table_path = "corrections.tbl"
    background_model(table_path=reprojected_table_path,
                     fit_table_path=fit_table_path,
                     correction_table_path=corrections_table_path)

    print("Applying corrections to each image")
    background_execute(input_directory=proj_dir,
                       table_path=reprojected_table_path,
                       correction_table_path=corrections_table_path,
                       corr_dir=corr_dir)

    if isinstance(coadd_types, str):
        coadd_types = [coadd_types]

    file_paths = []

    for i, coadd_type in enumerate(coadd_types):
        print(f"Coadding the images with {coadd_type}.")
        if output_file_name is None:
            output_file_name = "coadded.fits"
        output_file_name_coadd = output_file_name.replace(
            ".fits", f"_{coadd_type}.fits")

        add(input_directory=corr_dir,
            coadd_type=coadd_type,
            table_path=reprojected_table_path,
            header_path=header_path,
            output_path=output_file_name_coadd)

        if do_inject_header:
            inject_header(file_path=output_file_name_coadd,
                          input_directory=input_directory,
                          coadd_type=coadd_type)

        file_paths.append(
            os.path.join(output_directory, output_file_name_coadd))

    os.chdir(old_dir)

    u.debug_print(1, "montage.standard_script():", file_paths)

    return file_paths
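# A sketch of how standard_script() above might be invoked; the directory and file names
# are assumed for illustration. One coadded file is produced per entry in coadd_types.
coadd_paths = standard_script(
    input_directory="/data/epoch_1/g_band/",            # hypothetical input frames
    output_directory="/data/epoch_1/g_band_coadd/",     # created if it doesn't exist
    output_file_name="g_coadded.fits",
    coadd_types="median",
)
print(coadd_paths)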
Example #21
def main(obj, test, n, filter_dist, instrument, limit):
    properties = p.object_params_instrument(obj, instrument=instrument)
    burst_properties = p.object_params_frb(obj=obj[:-2])
    output = p.object_output_params(obj=obj, instrument=instrument)
    paths = p.object_output_paths(obj=obj, instrument=instrument)

    z = burst_properties['z']
    mjd_burst = burst_properties['mjd_burst']
    ebv_mw = burst_properties['dust_ebv']

    mjd_obs = properties['mjd']
    synth_path = properties['data_dir'] + 'synthetic/'
    u.mkdir_check(synth_path)
    synth_path = synth_path + 'sn_random/'
    u.mkdir_check(synth_path)

    epoch = mjd_obs - mjd_burst

    f_0 = filter_dist[0]

    hg_ra = burst_properties['hg_ra']
    hg_dec = burst_properties['hg_dec']

    burst_ra = burst_properties['burst_ra']
    burst_dec = burst_properties['burst_dec']

    image_path = paths[f_0 + '_' + properties['subtraction_image']]

    image = fits.open(image_path)

    wcs_info = wcs.WCS(image[0].header)

    burst_x, burst_y = wcs_info.all_world2pix(burst_ra, burst_dec, 0)

    now = time.Time.now()
    now.format = 'isot'
    synth_path += test + '_' + str(now) + '/'
    u.mkdir_check(synth_path)

    filters = output['filters']

    psf_models = []
    for i, f in enumerate(filters):
        sn.register_filter(f=f, instrument=instrument)
        psf_model = fits.open(paths[f[0] + '_psf_model'])
        psf_models.append(psf_model)

    for i in range(n):
        test_spec = test + '_' + str(i)
        test_path = synth_path + test_spec + '/'

        magnitudes = []
        mags_filters, model, x, y, tbl = sn.random_light_curves_type_ia(
            filters=filters,
            image=image,
            hg_ra=hg_ra,
            hg_dec=hg_dec,
            z=z,
            ebv_mw=ebv_mw,
            output_path=test_path,
            output_title=test,
            limit=limit,
            x=burst_x,
            y=burst_y,
            ra=burst_ra,
            dec=burst_dec)
        days = mags_filters['days']

        for f in filters:
            magnitude = sn.magnitude_at_epoch(epoch=epoch,
                                              days=days,
                                              mags=mags_filters[f])
            print(f, 'mag:', magnitude)
            magnitudes.append(magnitude)

        ph.insert_synthetic(x=float(x),
                            y=float(y),
                            obj=properties,
                            test_path=test_path,
                            filters=filters,
                            magnitudes=magnitudes,
                            suffix='sn_random_random_ia',
                            extra_values=tbl,
                            paths=paths,
                            output_properties=output,
                            psf_models=psf_models,
                            instrument=instrument)

    p.add_output_path(obj=obj,
                      key='subtraction_image_synth_sn_random_ia',
                      path=synth_path,
                      instrument=instrument)
Example #22
def main(epoch, origin, destination):
    print("\nExecuting Python script pipeline_fors2/4.1-insert_test_synth.py, with:")
    print(f"\tepoch {epoch}")
    print(f"\torigin directory {origin}")
    print(f"\tdestination directory {destination}")
    print()

    epoch_params = p.object_params_fors2(obj=epoch)
    outputs = p.object_output_params(obj=epoch, instrument='FORS2')

    data_dir = epoch_params['data_dir']

    insert = epoch_params['test_synths']

    origin_path = data_dir + "analysis/sextractor/" + origin
    destination_path = data_dir + destination

    u.mkdir_check(destination_path)
    u.mkdir_check(destination_path + "science/")
    u.mkdir_check(destination_path + "backgrounds/")

    filters = outputs['filters']

    for fil in filters:
        f = fil[0]
        path_fil_output = destination_path + "science/" + fil + "/"
        path_fil_input = origin_path + fil + "/"
        u.mkdir_check(path_fil_output)
        u.mkdir_check(destination_path + "backgrounds/" + fil)
        zeropoint, _, airmass, _, extinction, _ = ph.select_zeropoint(obj=epoch,
                                                                      filt=fil,
                                                                      instrument='fors2',
                                                                      outputs=outputs)

        print(path_fil_input)
        # print(os.listdir(path_fil_input))

        for fits_file in filter(lambda f: f.endswith("_norm.fits"), os.listdir(path_fil_input)):
            print(fits_file)
            path_fits_file_input = path_fil_input + fits_file
            path_fits_file_output = path_fil_output + fits_file
            path_psf_model = path_fits_file_input.replace(".fits", "_psfex.psf")

            try:
                ph.insert_point_sources_to_file(file=path_fits_file_input,
                                                x=array(insert["ra"]),
                                                y=array(insert["dec"]),
                                                mag=insert[f"{f}_mag"],
                                                output=path_fits_file_output,
                                                zeropoint=zeropoint,
                                                extinction=extinction,
                                                airmass=airmass,
                                                world_coordinates=True,
                                                psf_model=path_psf_model
                                                )
            except ValueError:
                ph.insert_point_sources_to_file(file=path_fits_file_input,
                                                x=array(insert["ra"]),
                                                y=array(insert["dec"]),
                                                mag=insert[f"{f}_mag"],
                                                output=path_fits_file_output,
                                                zeropoint=zeropoint,
                                                extinction=extinction,
                                                airmass=airmass,
                                                world_coordinates=True,
                                                fwhm=fits.open(path_psf_model)[1].header['PSF_FWHM']
                                                )

    if os.path.isfile(origin_path + epoch + '.log'):
        copyfile(origin_path + epoch + '.log', destination_path + epoch + ".log")
    u.write_log(path=destination_path + epoch + ".log", action='Inserted synthetic point sources at test positions.')
Example #23
def main(field, destination, epoch, instrument, template_instrument,
         comparison_type):
    if destination[-1] != '/':
        destination = destination + '/'

    p.refresh_params_frbs()
    types = ['normal', 'synth_random', 'synth_frb']
    if comparison_type not in types:
        raise ValueError(comparison_type +
                         ' is not a valid synthetic argument; choose from ' +
                         str(types))
    if comparison_type == 'normal':
        comparison_type = ''
    else:
        comparison_type = '_' + comparison_type

    params = p.object_params_frb(field)
    u.mkdir_check(f'{params["data_dir"]}subtraction/')
    destination_path = f'{params["data_dir"]}subtraction/{destination}/'
    u.mkdir_check(destination_path)

    comparison_title = f'{field}_{epoch}'

    comparison_paths = p.object_output_paths(obj=comparison_title,
                                             instrument=instrument)
    comparison_params = p.object_params_instrument(obj=comparison_title,
                                                   instrument=instrument)

    params = p.object_params_frb(field)
    template_epoch = params['template_epoch_' + template_instrument.lower()]

    template_title = f'{field}_{template_epoch}'
    template_paths = p.object_output_paths(obj=template_title,
                                           instrument=template_instrument)
    template_outputs = p.object_output_params(obj=template_title,
                                              instrument=template_instrument)
    template_params = p.object_params_instrument(
        obj=template_title, instrument=template_instrument)

    filters = params['filters']
    for f in filters:

        values = {}

        f_0 = f[0]
        destination_path_filter = f'{destination_path}{f}/'
        u.mkdir_check(destination_path_filter)

        # COMPARISON IMAGE:

        comparison_image_name = comparison_params[
            'subtraction_image'] + comparison_type

        # Get path to comparison image from parameter .yaml file
        if f'{f_0}_{comparison_image_name}' in comparison_paths:
            comparison_origin = comparison_paths[
                f'{f_0}_{comparison_image_name}']
        elif f'{f_0.lower()}_{comparison_image_name}' in comparison_paths:
            comparison_origin = comparison_paths[
                f'{f_0.lower()}_{comparison_image_name}']
        else:
            raise ValueError(
                f'{f_0.lower()}_{comparison_image_name} not found in {comparison_title} paths'
            )

        comparison_destination = f'{comparison_title}_comparison.fits'

        if comparison_type != '':
            shutil.copyfile(
                comparison_origin.replace('.fits',
                                          '.csv'), destination_path_filter +
                comparison_destination.replace('.fits', '.csv'))

        print('Copying comparison image')
        print('From:')
        print('\t', comparison_origin)

        print('To:')
        print(f'\t {destination_path}{f}/{comparison_destination}')
        shutil.copy(
            comparison_params['data_dir'] + 'output_values.yaml',
            f'{destination_path}{f}/{comparison_title}_comparison_output_values.yaml'
        )
        shutil.copy(
            comparison_params['data_dir'] + 'output_values.json',
            f'{destination_path}{f}/{comparison_title}_comparison_output_values.json'
        )
        shutil.copy(comparison_origin,
                    f'{destination_path}{f}/{comparison_destination}')
        values['comparison_file'] = comparison_origin

        # TEMPLATE IMAGE

        if template_instrument != 'FORS2' and template_instrument != 'XSHOOTER':
            f_0 = f_0.lower()

        template_image_name = template_params[
            'subtraction_image'] + comparison_type
        if f'{f_0}_{template_image_name}' in template_paths:
            template_origin = template_paths[f'{f_0}_{template_image_name}']
        elif f'{f_0.lower()}_{template_image_name}' in template_paths:
            template_origin = template_paths[
                f'{f_0.lower()}_{template_image_name}']
        else:
            raise ValueError(
                f'{f_0.lower()}_{template_image_name} not found in {template_title} paths'
            )
        fwhm_template = template_outputs[f_0 + '_fwhm_pix']
        template_destination = f'{template_title}_template.fits'

        print('Copying template')
        print('From:')
        print('\t', template_origin)
        print('To:')
        print(f'\t {destination_path}{f}/{template_destination}')
        shutil.copy(
            template_params['data_dir'] + 'output_values.yaml',
            f'{destination_path}{f}/{template_title}_template_output_values.yaml'
        )
        shutil.copy(
            template_params['data_dir'] + 'output_values.json',
            f'{destination_path}{f}/{template_title}_template_output_values.json'
        )
        shutil.copy(template_origin,
                    f'{destination_path}{f}/{template_destination}')
        values['template_file'] = template_origin

        p.add_params(f'{destination_path}{f}/output_values.yaml', values)
Example #24
def light_curves_salt2(z: float, filters: list, peak: float = None, days: Union[tuple, list, np.ndarray] = (0, 85),
                       show: bool = True, rise_time: float = None, ebv_mw: float = 0, ebv_host: float = 0.,
                       x1: float = None, output_path: str = None, output_title: str = None, day_markers: list = None,
                       fil_peak='bessellb', r_v: float = 2.3):
    """ Produces light curves, in the provided filters, for a Type Ia supernova using the SALT2 models as implemented 
    in sncosmo. 
    :param z: Redshift of source.
    :param filters: Filters to obtain light curves in. Must be in the sncosmo Registry.
    :param peak: Peak (ie lowest) absolute magnitude, in the filter given by fil_peak, to calibrate curves to.
    :param days: Either an array of times (in days) to calculate the light curves over, or a tuple describing the range
        of days, ie (first_day, last_day)
    :param show: Show plot onscreen?
    :param rise_time: Rest-frame time, from beginning of SN to peak magnitude, in days.
    :param ebv_mw: Reddening parameter E(B-V), using S&F11 law, for the Milky Way along the SN's line-of-sight.
    :param ebv_host: Reddening parameter E(B-V), using S&F11 law, for the host galaxy.
    :param x1: SALT2 light curve stretch parameter. See SALT2 documentation for further information.
    :param output_path: Path to which to save output. If None, does not save.
    :param output_title: Title to give output plot and table.
    :param day_markers: List of times (in days) to mark on plot, eg observation dates.
    :param fil_peak: Filter in which to set the peak absolute magnitude; usually reported in B or V.
    :return: mag_table, model
        mag_table: an astropy.table.Table with the times, in days, and the magnitudes in each filter.
        model: the entire sncosmo model instance.
    """
    if output_path is not None:
        if output_path[-1] != '/':
            output_path += '/'
        u.mkdir_check(output_path)
    # Find time at which model begins and time of peak.
    t_first, t_peak = find_model_times(source='salt2-extended', z=z, fil=fil_peak, show=False)
    # If both are None, the model is invalid over these wavelengths
    if t_first is t_peak is None:
        return None, None
    # Set up model in sncosmo.
    dust_mw = sncosmo.F99Dust()
    dust_host = sncosmo.CCM89Dust()
    model = sncosmo.Model(source='salt2-extended', effects=[dust_mw, dust_host],
                          effect_names=['mw', 'host'],
                          effect_frames=['obs', 'rest'])
    model.set(x1=x1)
    # If a rise time is not given, allow the model to determine this itself.
    if rise_time is None:
        if t_peak <= 0:
            model.set(z=z, t0=-t_first, mwebv=ebv_mw, hostebv=ebv_host, hostr_v=r_v)
        else:
            model.set(z=z, t0=0, mwebv=ebv_mw, hostebv=ebv_host, hostr_v=r_v)
    elif rise_time == -1:
        model.set(z=z, t0=t_first, mwebv=ebv_mw, hostebv=ebv_host, hostr_v=r_v)
    else:
        # Correct rise time to observer frame
        rise_time_obs = rise_time * (1 + z)
        model.set(z=z, t0=rise_time_obs - t_peak, mwebv=ebv_mw, hostebv=ebv_host, hostr_v=r_v)
    print(model)
    # Set peak absolute magnitude of model.
    if peak is not None:
        model.set_source_peakabsmag(peak, fil_peak, 'ab')
    if type(days) is tuple:
        # Set up array of times.
        days = np.arange(days[0], days[1] + 1, 0.1)

    mags_filters = table.Table()
    mags_filters['days'] = days
    maxes = []
    t_peaks = []
    peaks = []
    for f in filters:
        # Get light curve.
        mags = model.bandmag(f, 'ab', days)
        # If the light curve is mostly flat, the model has probably broken down.
        if np.sum(mags == mags[0]) < 0.9 * len(mags):
            # If this is False, the entire light curve must be nan, and we'll get nothing useful out of it.
            if not np.isnan(np.nanmax(mags)):
                # Collect peak (lowest) magnitudes, peak times, and maximum (faintest) magnitudes for each filter.
                maxes.append(np.nanmax(mags[mags != np.inf]))
                peaks.append(np.nanmin(mags[mags != np.inf]))
                t_peaks.append(days[np.nanargmin(mags)])
                # Write light curve to table.
                mags_filters[f] = mags
                if output_path is not None or show:
                    # Plot curve.
                    plt.plot(days, mags, label=f)

    # If we have no maxima, the model has broken down.
    if len(maxes) > 0:
        # Collect this for plotting purposes.
        max_mag = np.nanmax(maxes)
        min_mag = np.nanmin(peaks)
    else:
        return None, None

    # If an output_path directory is not given and 'show' is not True, there's no point doing the plot.
    if output_path is not None or show:
        for i, t_peak in enumerate(t_peaks):
            # Plot blue lines marking the peak of each filter light curve.
            plt.plot([t_peak, t_peak], [max_mag + 1, min_mag - 1], c='blue')

        if day_markers is not None:
            for other_day in day_markers:
                # Plot red lines marking the observation dates.
                plt.plot([other_day, other_day], [max_mag + 1, min_mag - 1], c='red')
        plt.xlabel('Time (days)')
        plt.ylabel('Magnitude')
        plt.ylim(max_mag + 1, min_mag - 1)
        plt.legend()
        if output_path is not None:
            # Save figure.
            plt.savefig(output_path + output_title + '.png')
            # Save table to csv.
            mags_filters.write(output_path + output_title + '.csv', format='ascii.csv')
        if show:
            # Show figure onscreen.
            plt.show()
        plt.close()

    return mags_filters, model
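# Illustrative call to light_curves_salt2() above; the redshift, extinction, peak magnitude and
# output path are assumed values. The filters used must be registered with sncosmo; the built-in
# Bessell bandpasses are used here purely for illustration.
mag_table, model = light_curves_salt2(
    z=0.3,
    filters=['bessellb', 'bessellv'],    # built-in sncosmo bandpasses
    peak=-19.1,                          # peak absolute magnitude in fil_peak
    days=(0, 85),
    show=False,
    ebv_mw=0.03,
    x1=0.,                               # SALT2 stretch parameter
    output_path='/data/FRB1234/synthetic/sn_models/',   # hypothetical output directory
    output_title='salt2_test',
    day_markers=[40.0],                  # e.g. the observation epoch, in days
)
if mag_table is not None:
    print(mag_table[:5])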
Example #25
def main(data_title: 'str'):
    print("\nExecuting Python script pipeline_fors2/1-initial.py, with:")
    print(f"\tepoch {data_title}")
    print()

    epoch_params = p.object_params_fors2(obj=data_title)

    data_dir = epoch_params['data_dir']
    output_dir = data_dir + "/0-data_with_raw_calibs/"

    # Write tables of fits files to main directory; firstly, science images only:
    table = fits_table(input_path=output_dir,
                       output_path=data_dir + data_title + "_fits_table_science.csv",
                       science_only=True)
    # Then including all calibration files
    table_full = fits_table(input_path=output_dir,
                            output_path=data_dir + data_title + "_fits_table_all.csv",
                            science_only=False)

    fits_table_all(input_path=output_dir,
                   output_path=data_dir + data_title + "_fits_table_detailled.csv",
                   science_only=False)

    # Clear output files for fresh start.
    u.rm_check(data_dir + '/output_values.yaml')
    u.rm_check(data_dir + '/output_values.json')

    # Collect list of filters used:
    filters = []
    columns = []

    for j in [1, 2, 3, 4, 5]:
        column = 'filter' + str(j)
        for name in table[column]:
            if name != 'free':
                if name not in filters:
                    filters.append(name)
                    columns.append(column)

    # Collect pointings of standard-star observations.
    std_ras = []
    std_decs = []
    std_pointings = []
    # TODO: This is a horrible way to do this. Take the time to find a better one.
    for ra in table_full[table_full['object'] == 'STD']['ref_ra']:
        if ra not in std_ras:
            std_ras.append(ra)
    for dec in table_full[table_full['object'] == 'STD']['ref_dec']:
        if dec not in std_decs:
            std_decs.append(dec)

    for i, ra in enumerate(std_ras):
        std_pointings.append(f'RA{ra}_DEC{std_decs[i]}')

    print(std_ras)
    print(std_decs)
    print(std_pointings)

    # Collect and save some stats on those filters:
    param_dict = {}
    exp_times = []
    ns_exposures = []

    param_dict['filters'] = filters
    param_dict['object'] = table['object'][0]
    param_dict['obs_name'] = table['obs_name'][0]
    mjd = param_dict['mjd_obs'] = float(table['mjd_obs'][0])

    for i, f in enumerate(filters):
        f_0 = f[0]
        exp_time = table['exp_time'][table[columns[i]] == f]
        exp_times.append(exp_time)

        airmass_col = table['airmass'][table[columns[i]] == f]
        n_frames = sum(table[columns[i]] == f)
        # Each FORS2 exposure is read out as two chip frames, hence two frames per exposure.
        n_exposures = n_frames / 2
        ns_exposures.append(n_exposures)

        airmass = float(np.nanmean(airmass_col))

        param_dict[f_0 + '_exp_time_mean'] = float(np.nanmean(exp_time))
        param_dict[f_0 + '_exp_time_err'] = float(2 * np.nanstd(exp_time))
        param_dict[f_0 + '_airmass_mean'] = airmass
        param_dict[f_0 + '_airmass_err'] = float(
            max(np.nanmax(airmass_col) - airmass, airmass - np.nanmin(airmass_col)))
        param_dict[f_0 + '_n_frames'] = float(n_frames)
        param_dict[f_0 + '_n_exposures'] = float(n_exposures)
        param_dict[f_0 + '_mjd_obs'] = float(np.nanmean(table['mjd_obs'][table[columns[i]] == f]))

        std_filter_dir = f'{data_dir}calibration/std_star/{f}/'
        u.mkdir_check(std_filter_dir)
        print(f'Copying {f} calibration data to std_star folder...')

        # Sort the STD files by filter, and within that by pointing.
        for j, ra in enumerate(std_ras):
            at_pointing = False
            pointing = std_pointings[j]
            pointing_dir = std_filter_dir + pointing + '/'
            for file in \
                    table_full[
                        (table_full['object'] == 'STD') &
                        (table_full['ref_ra'] == ra) &
                        (table_full[columns[i]] == f)]['identifier']:
                at_pointing = True
                u.mkdir_check(pointing_dir)
                shutil.copyfile(output_dir + file, pointing_dir + file)
            if at_pointing:
                for file in table_full[table_full['object'] == 'BIAS']['identifier']:
                    shutil.copyfile(output_dir + file, pointing_dir + file)
                for file in table_full[(table_full['object'] == 'FLAT,SKY') & (table_full[columns[i]] == f)][
                    'identifier']:
                    shutil.copyfile(output_dir + file, pointing_dir + file)

    p.add_output_values(obj=data_title, params=param_dict)
    if "new_epoch" in data_dir:
        mjd = f"MJD{int(float(mjd))}"
        new_data_dir = data_dir.replace("new_epoch", mjd)
        p.add_epoch_param(obj=data_title, params={"data_dir": new_data_dir})
Example #26
def main(data_dir, data_title, origin, destination, all_synths):
    print("\nExecuting Python script pipeline_fors2/5-background_subtract.py, with:")
    print(f"\tepoch {data_title}")
    print(f"\torigin directory {origin}")
    print(f"\tdestination directory {destination}")
    print()

    methods = ["ESO backgrounds only", "SExtractor backgrounds only", "polynomial fit", "Gaussian fit", "median value"]

    if all_synths:
        frame = 56
        method = "polynomial fit"
        degree = 5
        do_mask = True
        local = True
        global_sub = False
        trim_image = False
        recorrect_subbed = True
        eso_back = False

    else:
        frame = 200
        # frame_arcsec = 30 * units.arcsec
        # frame_deg = frame_arcsec.to(units.deg)

        eso_back = False

        _, method = u.select_option(message="Please select the background subtraction method.", options=methods,
                                 default="polynomial fit")
        degree = None
        if method == "polynomial fit":
            degree = u.user_input(message=f"Please enter the degree of {method} to use:", typ=int, default=3)
        elif method == "ESO backgrounds only":
            eso_back = True
        do_mask = False
        if method not in ["ESO backgrounds only", "SExtractor backgrounds only", "median value"]:
            do_mask = u.select_yn(message="Mask sources using SExtractor catalogue?", default=True)
        if method in ["polynomial fit", "Gaussian fit"]:
            local = u.select_yn(message="Use a local fit?", default=True)
        else:
            local = False
        global_sub = False
        trim_image = False
        recorrect_subbed = False
        if local:
            global_sub = u.select_yn(message="Subtract local fit from entire image?", default="n")
            if not global_sub:
                trim_image = u.select_yn(message="Trim images to subtracted region?", default="y")
                recorrect_subbed = u.select_yn(message="Re-normalise background of subtracted region?", default="y")

        # if not eso_back and method != "SExtractor backgrounds only":
        #     eso_back = u.select_yn(message="Subtract ESO Reflex fitted backgrounds first?", default=False)

    outputs = p.object_output_params(data_title, instrument='FORS2')

    data_dir = u.check_trailing_slash(data_dir)

    destination = u.check_trailing_slash(destination)
    destination = data_dir + destination
    u.mkdir_check_nested(destination)

    origin = u.check_trailing_slash(origin)
    science_origin = data_dir + origin + "science/"
    print(science_origin)

    filters = outputs['filters']
    frb_params = p.object_params_frb(obj=data_title[:-2])
    epoch_params = p.object_params_fors2(obj=data_title)

    background_origin_eso = ""
    if eso_back:
        background_origin_eso = data_dir + "/" + origin + "/backgrounds/"

    if method == "SExtractor backgrounds only":
        background_origin = f"{data_dir}{origin}backgrounds_sextractor/"
    elif method == "polynomial fit":
        background_origin = f"{destination}backgrounds/"  # f"{destination}backgrounds_{method.replace(' ', '')}_degree_{degree}_local_{local}_globalsub_{global_sub}/"
    else:
        background_origin = f"{destination}backgrounds/"  # f"{destination}backgrounds_{method.replace(' ', '')}_local_{local}_globalsub_{global_sub}/"

    trimmed_path = ""
    if trim_image:
        trimmed_path = f"{data_dir}{origin}trimmed_to_background/"
        u.mkdir_check_nested(trimmed_path)

    ra = frb_params["burst_ra"]
    dec = frb_params["burst_dec"]

    if all_synths:
        ras = epoch_params["test_synths"]["ra"]
        decs = epoch_params["test_synths"]["dec"]
    else:
        ras = [ra]
        decs = [dec]

    for fil in filters:
        trimmed_path_fil = ""
        if trim_image:
            trimmed_path_fil = f"{trimmed_path}{fil}/"
            u.mkdir_check(trimmed_path_fil)
        background_fil_dir = f"{background_origin}{fil}/"
        u.mkdir_check_nested(background_fil_dir)
        science_destination_fil = f"{destination}science/{fil}/"
        u.mkdir_check_nested(science_destination_fil)
        files = os.listdir(science_origin + fil + "/")
        for file_name in files:
            if file_name.endswith('.fits'):
                new_file = file_name.replace("norm", "bg_sub")
                new_path = f"{science_destination_fil}/{new_file}"
                print("NEW_PATH:", new_path)
                science = science_origin + fil + "/" + file_name
                # First subtract ESO Reflex background images
                # frame = (frame_deg / f.get_pixel_scale(file=science, astropy_units=True)[1]).to(f.pix).value
                if eso_back:
                    background_eso = background_origin_eso + fil + "/" + file_name.replace("SCIENCE_REDUCED",
                                                                                           "PHOT_BACKGROUND_SCI")

                    ff.subtract_file(file=science, sub_file=background_eso, output=new_path)
                    science_image = new_path

                if method != "ESO backgrounds only":

                    print(ra, dec)

                    print("Science image:", science)
                    science_image = fits.open(science)
                    print("Science file:", science_image)
                    wcs_this = WCS(header=science_image[0].header)

                    if method == "SExtractor backgrounds only":
                        background = background_origin + fil + "/" + file_name + "_back.fits"
                        print("Background image:", background)
                    else:
                        if method == "median value":
                            print(science_image[0].data.shape)
                            _, background_value, _ = sigma_clipped_stats(science_image[0].data)
                            background = deepcopy(science_image)

                            background[0].data = np.full(shape=science_image[0].data.shape, fill_value=background_value)
                            background_path = background_origin + fil + "/" + file_name.replace("SCIENCE_REDUCED",
                                                                                                "PHOT_BACKGROUND_MEDIAN")

                        # Next do background fitting.
                        else:

                            background = deepcopy(science_image)
                            background[0].data = np.zeros(background[0].data.shape)
                            background_path = background_origin + fil + "/" + file_name.replace("SCIENCE_REDUCED",
                                                                                                "PHOT_BACKGROUND_FITTED")

                            for i, ra in enumerate(ras):
                                dec = decs[i]
                                x, y = wcs_this.all_world2pix(ra, dec, 0)
                                print(x, y)

                                bottom, top, left, right = ff.subimage_edges(data=science_image[0].data, x=x, y=y,
                                                                             frame=frame)

                                if do_mask:
                                    # Produce a pixel mask that roughly masks out the true sources in the image so that
                                    # they don't get fitted.
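                                    # In the weights array, a value of 1 keeps the pixel in the background fit
                                    # and 0 excludes it, so detected sources are zeroed out below.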
                                    mask_max = 10
                                    _, pixel_scale = ff.get_pixel_scale(science_image)
                                    sextractor = Table.read(
                                        f"{data_dir}analysis/sextractor/4-divided_by_exp_time/{fil}/{file_name.replace('.fits', '_psf-fit.cat')}",
                                        format='ascii.sextractor')
                                    weights = np.ones(shape=science_image[0].data.shape)

                                    for obj in filter(
                                            lambda o: left < o["X_IMAGE"] < right and bottom < o["Y_IMAGE"] < top,
                                            sextractor):
                                        mask_rad = min(int(obj["A_WORLD"] * obj["KRON_RADIUS"] / pixel_scale), mask_max)
                                        x_prime = int(np.round(obj["X_IMAGE"]))
                                        y_prime = int(np.round(obj["Y_IMAGE"]))
                                        weights[y_prime - mask_rad:y_prime + mask_rad,
                                                x_prime - mask_rad:x_prime + mask_rad] = 0.0

                                    plt.imshow(weights, origin="lower")
                                    plt.savefig(
                                        background_origin + fil + "/" + file_name.replace("norm.fits", "mask.png"))
                                else:
                                    weights = None

                                background_this = fit_background_fits(image=science_image,
                                                                      model_type=method[:method.find(" ")],
                                                                      deg=degree, local=local,
                                                                      global_sub=global_sub,
                                                                      centre_x=x, centre_y=y, frame=frame,
                                                                      weights=weights)
                                background[0].data += background_this[0].data

                                if recorrect_subbed:
                                    offset = get_median_background(image=science,
                                                                   ra=epoch_params["renormalise_centre_ra"],
                                                                   dec=epoch_params["renormalise_centre_dec"], frame=50,
                                                                   show=False,
                                                                   output=new_path[
                                                                          :new_path.find("bg_sub")] + "renorm_patch_")
                                    print("RECORRECT_SUBBED:", recorrect_subbed)
                                    print("SUBTRACTING FROM BACKGROUND:", offset)
                                    print(bottom, top, left, right)
                                    print(background[0].data[bottom:top, left:right].shape)
                                    print(np.median(background[0].data[bottom:top, left:right]))
                                    background[0].data[bottom:top, left:right] -= offset
                                    print(np.median(background[0].data[bottom:top, left:right]))

                                if trim_image:
                                    print("TRIMMED_PATH_FIL:", trimmed_path_fil)

                                    science_image = ff.trim_file(path=science_image, left=left, right=right, top=top,
                                                                 bottom=bottom,
                                                                 new_path=trimmed_path_fil + file_name.replace(
                                                                     "norm.fits",
                                                                     "trimmed_to_back.fits"))
                                    print("Science after trim:", science_image)

                                    background = ff.trim_file(path=background, left=left, right=right, top=top,
                                                              bottom=bottom,
                                                              new_path=background_path)

                            print("Writing background to:")
                            print(background_path)
                            background.writeto(background_path, overwrite=True)

                    print("SCIENCE:", science_image)
                    print("BACKGROUND:", background)

                    subbed = ff.subtract_file(file=science_image, sub_file=background, output=new_path)

                    # # TODO: check if regions overlap
                    #
                    # plt.hist(subbed[0].data[int(y - frame + 1):int(y + frame - 1),
                    #          int(x - frame + 1):int(x + frame - 1)].flatten(),
                    #          bins=10)
                    # plt.savefig(new_path[:new_path.find("bg_sub")] + "histplot.png")
                    # plt.close()

    copyfile(data_dir + "/" + origin + "/" + data_title + ".log", destination + data_title + ".log")
    u.write_log(path=destination + data_title + ".log",
                action=f'Backgrounds subtracted using 4-background_subtract.py with method {method}\n')
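
For reference, the "median value" branch above boils down to a single idea: estimate one sky level with a sigma-clipped median and subtract it from the whole frame. Below is a minimal standalone sketch of that idea, assuming a single-extension FITS image; the function name and the BG_MED header keyword are illustrative and not part of the pipeline.

from astropy.io import fits
from astropy.stats import sigma_clipped_stats


def subtract_median_background(science_path: str, output_path: str) -> float:
    """Subtract a single sigma-clipped median sky level from a FITS image."""
    with fits.open(science_path) as hdul:
        data = hdul[0].data.astype(float)
        # The sigma-clipped median is robust against bright sources biasing the sky estimate.
        _, sky_level, _ = sigma_clipped_stats(data)
        hdul[0].data = data - sky_level
        hdul[0].header["BG_MED"] = (float(sky_level), "Subtracted sky level")
        hdul.writeto(output_path, overwrite=True)
    return float(sky_level)
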
Example #27
def main(data_title: str, show: bool = False):
    properties = p.object_params_xshooter(data_title)
    path = properties['data_dir']

    master_path = path + '/1-master_calibs/'
    reduced_path = path + '/2-reduced/'
    defringed_path = path + '/3-defringed/'

    u.mkdir_check(defringed_path)

    # Define fringe measurement points.

    # high_xs = [267, 267, 267, 267, 267, 267, 267, 267]
    # high_ys = [279, 279, 279, 279, 279, 279, 279, 279]
    # low_xs = [266, 266, 266, 267, 267, 270, 274, 273]
    # low_ys = [293, 295, 298, 301, 303, 305, 303, 292]

    high_xs = [219, 380, 426, 515, 156, 495, 310]
    high_ys = [166, 369, 185, 33, 59, 195, 70]
    low_xs = [219, 380, 424, 474, 160, 500, 315]
    low_ys = [120, 342, 213, 39, 34, 160, 35]

    # n_random = 1000
    #
    # high_xs = np.random.random(n_random)
    # high_xs *= 507
    # high_xs += 29
    # high_xs = np.round(high_xs)
    # high_xs = high_xs.astype(int)
    #
    # high_ys = np.random.random(n_random)
    # high_ys *= 200
    # high_ys += 20
    # high_ys = np.round(high_ys)
    # high_ys = high_ys.astype(int)
    #
    # low_xs = np.random.random(n_random)
    # low_xs *= 507
    # low_xs += 29
    # low_xs = np.round(low_xs)
    # low_xs = low_xs.astype(int)
    #
    # low_ys = np.random.random(n_random)
    # low_ys *= 200
    # low_ys += 20
    # low_ys = np.round(low_ys)
    # low_ys = low_ys.astype(int)

    filters = filter(lambda f: os.path.isdir(reduced_path + f),
                     os.listdir(reduced_path))
    for f in filters:
        print('Constructing fringe map for', f)
        filter_path = reduced_path + f + '/'
        defringed_filter_path = defringed_path + f + '/'
        master_filter_path = master_path + f + '/'
        u.mkdir_check(defringed_filter_path)

        files = list(
            filter(lambda file: file[-5:] == '.fits', os.listdir(filter_path)))
        # Construct fringe map by median-combining science images.
        fringe_map = ff.stack(files,
                              directory=filter_path,
                              output=master_filter_path + 'fringe_map.fits',
                              stack_type='median',
                              inherit=False,
                              show=show,
                              normalise=True)
        fringe_map = fringe_map[0].data
        map_differences = []

        for i in range(len(high_xs)):
            # Take the median of a small cutout at each fringe-peak and fringe-trough point.
            high_y = high_ys[i]
            high_x = high_xs[i]
            high_cut = fringe_map[high_y - 1:high_y + 1, high_x - 1:high_x + 1]
            high = np.nanmedian(high_cut)

            low_y = low_ys[i]
            low_x = low_xs[i]
            low_cut = fringe_map[low_y - 1:low_y + 1, low_x - 1:low_x + 1]
            low = np.nanmedian(low_cut)

            map_differences.append(high - low)

        for file in os.listdir(filter_path):
            print(file)
            hdu = fits.open(filter_path + file)
            data = hdu[0].data
            image_differences = []
            factors = []
            for i in range(len(high_xs)):
                high_y = high_ys[i]
                high_x = high_xs[i]
                high_cut = data[high_y - 2:high_y + 2, high_x - 2:high_x + 2]
                high = np.nanmedian(high_cut)

                low_y = low_ys[i]
                low_x = low_xs[i]
                low_cut = data[low_y - 2:low_y + 2, low_x - 2:low_x + 2]
                low = np.nanmedian(low_cut)

                difference = high - low
                image_differences.append(difference)
                factor = difference / map_differences[i]
                factors.append(factor)
            used_factor = np.nanmedian(factors)
            adjusted_map = fringe_map * used_factor
            data = data - adjusted_map
            hdu[0].data = data

            norm = pl.nice_norm(data)
            if show:
                plt.imshow(data, norm=norm, origin='lower')
                plt.show()

            hdu.writeto(defringed_filter_path + file, overwrite=True)

        if show:
            norm = pl.nice_norm(fringe_map)
            plt.imshow(fringe_map, norm=norm, origin='lower')
            plt.scatter(high_xs, high_ys)
            plt.scatter(low_xs, low_ys)
            plt.show()

    copyfile(reduced_path + data_title + ".log",
             defringed_path + data_title + ".log")
    u.write_log(path=defringed_path + data_title + ".log",
                action='Fringe patterns subtracted from science images.\n')
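
The defringing loop above rests on a scaling argument: the fringe pattern in each science frame is assumed to be the master fringe map times a single factor, and that factor is estimated from the brightness difference between matched fringe-peak and fringe-trough points. A compact sketch of that step, assuming integer pixel coordinates and arrays already in memory (the function and argument names are illustrative):

import numpy as np


def defringe(data, fringe_map, high_points, low_points, half_width=2):
    """Subtract a fringe map scaled to match the fringe amplitude of `data`.

    `high_points` and `low_points` are matched lists of (x, y) integer pixel
    positions on fringe peaks and troughs respectively.
    """
    def local_median(img, x, y):
        return np.nanmedian(img[y - half_width:y + half_width, x - half_width:x + half_width])

    factors = []
    for (high_x, high_y), (low_x, low_y) in zip(high_points, low_points):
        image_diff = local_median(data, high_x, high_y) - local_median(data, low_x, low_y)
        map_diff = local_median(fringe_map, high_x, high_y) - local_median(fringe_map, low_x, low_y)
        factors.append(image_diff / map_diff)
    # Taking the median over control points damps outliers from sources or defects
    # landing on a measurement point.
    return data - np.nanmedian(factors) * fringe_map
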
Example #28
def main(
    field_name: str,
    epoch_name: str,
    imaging: bool,
    spectroscopy: bool,
    instrument: str,
    furby_path: str,
    do: str,
    do_not_reuse_masters: bool,
    overwrite_download: bool,
    distance_tolerance: float,
    snr_min: float,
    class_star_tolerance: float,
    debug_level: int,
):
    u.debug_level = debug_level

    new_field = False

    directory = fld.load_epoch_directory()
    if epoch_name is not None:
        print(f"Looking for {epoch_name} in directory...")
        if epoch_name in directory:
            epoch_dict = directory[epoch_name]
            field_name = epoch_dict["field_name"]
            instrument = epoch_dict["instrument"]
            mode = epoch_dict["mode"]
            if mode == "imaging":
                imaging = True
            elif mode == "spectroscopy":
                spectroscopy = True
        else:
            print(f"{epoch_name} not found.")

    # Do automated FURBY process.
    if furby_path is not None:

        new_field = True
        furby = True

        imaging = True

        healpix_path = furby_path.replace(".json", "_hp.fits")
        if not os.path.isfile(healpix_path):
            healpix_path = None

        params = fld.FRBField.param_from_furby_json(
            json_path=furby_path,
            healpix_path=healpix_path,
        )
        field_name = params["name"]
        field = fld.Field.from_params(name=field_name)

        instrument = "vlt-fors2"

        epoch_name = f"{field_name}_FORS2_1"
        fld.FORS2ImagingEpoch.new_yaml(
            name=epoch_name,
            path=fld.FORS2ImagingEpoch.build_param_path(
                instrument_name=instrument,
                field_name=field_name,
                epoch_name=epoch_name),
            field=field.name,
            instrument=instrument,
            data_path=os.path.join(field_name, "imaging", instrument,
                                   epoch_name, ""))
        # epoch = fld.FORS2ImagingEpoch.from_params(
        #     name=epoch_name,
        #     instrument=instrument,
        #     field=field,
        #     old_format=False,
        # )

    else:
        if field_name is None:
            fields = ["New field"]
            fields += fld.list_fields()
            old_fields = fld.list_fields_old()
            for old_field in old_fields:
                if old_field not in fields and f"FRB20{old_field[3:]}" not in fields:
                    fields.append(old_field)
            opt, field_name = u.select_option(
                "No field specified. Please select one:",
                options=fields,
                sort=False)
            if opt == 0:
                new_field = True
                field_name = input("Please enter the name of the new field:\n")
        # Check for field param file
        if not new_field:
            field = fld.Field.from_params(name=field_name)
        else:
            field = None
        # If this field has no parameter file, ask to create one.
        if field is None:
            param_path = os.path.join(p.param_dir, "fields", "")
            # Check for old format param file, and ask to convert if found.
            old_field_name = f"FRB{field_name[-8:]}"
            old_params = p.object_params_frb(obj=old_field_name)
            print()
            field_param_path = os.path.join(param_path, field_name)
            u.mkdir_check(field_param_path)
            field_param_path_yaml = os.path.join(field_param_path,
                                                 f"{field_name}.yaml")
            if old_params is None:
                if not new_field:
                    print(f"{field_name} not found in the param directory.")
                if u.select_yn(
                        f"Create a new param file at '{field_param_path_yaml}'?"
                ):
                    fld.Field.new_params_from_input(
                        field_name=field_name,
                        field_param_path=field_param_path)
                else:
                    print("Exiting.")
                    exit(0)
            else:
                print("Old format param file detected.")
                if u.select_yn("Convert to new format?"):
                    fld.FRBField.convert_old_param(frb=old_field_name)
                else:
                    print("Exiting...")
                    exit(0)
            field = fld.Field.from_params(name=field_name)

    if spectroscopy:
        mode = "Spectroscopy"
    elif imaging:
        mode = "Imaging"
    else:
        _, mode = u.select_option(message="Please select a mode.",
                                  options=["Imaging", "Spectroscopy"])

    if mode == "Spectroscopy":
        if epoch_name is None:
            # Build a list of imaging epochs from that field.
            field.gather_epochs_spectroscopy()
            # Let the user select an epoch.
            epoch = field.select_epoch_spectroscopy()
        else:
            if instrument is None:
                instrument = fld.select_instrument(mode="spectroscopy")
            epoch = fld.SpectroscopyEpoch.from_params(epoch_name,
                                                      instrument=instrument,
                                                      field=field)

    else:  # if mode == "Imaging"
        if epoch_name is None:
            # Build a list of imaging epochs from that field.
            if type(field) is fld.FRBField:
                field.gather_epochs_old()
            field.gather_epochs_imaging()
            # Let the user select an epoch.
            epoch = field.select_epoch_imaging()
        else:
            if instrument is None:
                instrument = fld.select_instrument(mode="imaging")
            epoch = fld.ImagingEpoch.from_params(epoch_name,
                                                 instrument=instrument,
                                                 field=field)
            epoch.field = field

    u.debug_print(2, "pipeline.py: type(epoch) ==", type(epoch))
    epoch.do = do
    epoch.pipeline(do_not_reuse_masters=do_not_reuse_masters,
                   overwrite_download=overwrite_download,
                   distance_tolerance=distance_tolerance,
                   snr_min=snr_min,
                   class_star_tolerance=class_star_tolerance)
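
The epoch lookup at the top of this example assumes that the epoch directory is a mapping from epoch name to a record containing at least "field_name", "instrument" and "mode". A small illustration under that assumption; the entry shown is a made-up placeholder, and resolve_epoch is not a function from the package.

example_directory = {
    "FRB20180924_FORS2_1": {
        "field_name": "FRB20180924",
        "instrument": "vlt-fors2",
        "mode": "imaging",
    },
}


def resolve_epoch(directory: dict, epoch_name: str):
    """Return (field_name, instrument, mode) for a known epoch, or None if it is absent."""
    entry = directory.get(epoch_name)
    if entry is None:
        return None
    return entry["field_name"], entry["instrument"], entry["mode"]
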
Example #29
def main(data_title: str, show: bool = False):
    properties = p.object_params_xshooter(data_title)
    path = properties['data_dir']

    raw_path = path + '/0-data_with_raw_calibs/'
    master_path = path + '/1-master_calibs/'
    reduced_path = path + '/2-reduced/'

    u.mkdir_check(raw_path)
    u.mkdir_check(master_path)
    u.mkdir_check(reduced_path)

    files = os.listdir(raw_path)
    biases = []
    flats = {}
    science = {}

    airmasses = {}

    print('Creating lists of files.')

    for file in files:
        if file[-5:] == '.fits':
            hdu = fits.open(raw_path + file)
            header = hdu[0].header
            obj = header['OBJECT']
            f = header['ESO INS FILT1 NAME']
            if 'BIAS' in obj:
                biases.append(file)
            elif 'FLAT' in obj:
                if f not in flats:
                    flats[f] = []
                flats[f].append(file)
            else:
                if f not in science:
                    science[f] = []
                    airmasses[f] = []
                science[f].append(file)

    # Stack biases.
    print('Processing biases.')
    ff.stack(biases, output=master_path + 'master_bias.fits', stack_type='median',
             directory=raw_path, inherit=False)
    master_bias = CCDData.read(master_path + 'master_bias.fits', unit='du')

    # Loop through filters.
    for f in science:

        flats_filter = flats[f]
        master_path_filter = master_path + f + '/'
        u.mkdir_check(master_path_filter)

        print(f'Processing flats for filter {f}.')
        flats_ccds = []
        for flat in flats_filter:
            flat_ccd = CCDData.read(raw_path + flat, unit='du')
            # Subtract master bias from each flat.
            flat_ccd = ccdproc.subtract_bias(ccd=flat_ccd, master=master_bias)
            flats_ccds.append(flat_ccd)
        # Stack debiased flats.
        master_flat = ff.stack(flats_ccds, output=None, stack_type='median', inherit=False)
        master_flat.writeto(master_path_filter + f'master_flat.fits', overwrite=True)
        master_flat = CCDData.read(master_path_filter + f'master_flat.fits', unit='du')

        science_filter = science[f]

        reduced_path_filter = reduced_path + f + '/'
        u.mkdir_check(reduced_path_filter)
        # Loop through the science images.
        for image in science_filter:
            print(f'Reducing {image}.')
            image_ccd = CCDData.read(raw_path + image, unit='du')
            if show:
                norm = ImageNormalize(image_ccd.data, interval=ZScaleInterval(), stretch=SqrtStretch())
                plt.imshow(image_ccd.data, origin='lower', norm=norm)
                plt.title('Unreduced image')
                plt.show()
            # Subtract master bias from science image.
            image_ccd = ccdproc.subtract_bias(image_ccd, master_bias)
            if show:
                norm = ImageNormalize(image_ccd.data, interval=ZScaleInterval(), stretch=SqrtStretch())
                plt.imshow(image_ccd.data, origin='lower', norm=norm)
                plt.title('After debiasing')
                plt.show()
            # Divide by master flat.
            image_ccd = ccdproc.flat_correct(image_ccd, master_flat)
            if show:
                norm = ImageNormalize(image_ccd.data, interval=ZScaleInterval(), stretch=SqrtStretch())
                plt.imshow(image_ccd.data, origin='lower', norm=norm)
                plt.title('After flatfielding')
                plt.show()
            # Convert back to HDU object for saving.
            image_ccd = image_ccd.to_hdu()
            image_ccd.writeto(reduced_path_filter + image, overwrite=True)
    u.write_log(path=reduced_path + data_title + ".log", action=f'Reduced using 1-reduce.py')
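
The per-pixel arithmetic behind the reduction above is reduced = (science - master_bias) / normalised_flat, where ccdproc.flat_correct normalises the flat by its mean before dividing. A plain-NumPy sketch of that arithmetic, with arrays standing in for the FITS data loaded in the example:

import numpy as np


def reduce_frame(science, master_bias, master_flat):
    """Bias-subtract and flat-field a single frame (arrays only, no FITS I/O).

    `master_flat` is assumed to be already bias-subtracted, as in the example above.
    """
    debiased = science - master_bias
    flat_norm = master_flat / np.nanmean(master_flat)   # flat_correct normalises by the mean
    return debiased / flat_norm
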
Example #30
def main(obj, test, n, mag_lower, mag_upper, colour_upper, colour_lower):
    properties = p.object_params_fors2(obj)
    output = p.object_output_params(obj=obj, instrument='FORS2')
    paths = p.object_output_paths(obj)
    burst_properties = p.object_params_frb(obj[:-2])

    synth_path = properties['data_dir'] + 'synthetic/'

    u.mkdir_check(synth_path)
    synth_path = synth_path + 'random/'
    u.mkdir_check(synth_path)
    now = time.Time.now()
    now.format = 'isot'
    test = str(now) + '_' + test
    test_path = synth_path + test + '/'
    u.mkdir_check(test_path)

    filters = {}
    bluest = None
    bluest_lambda = np.inf
    for f in output['filters']:
        filter_properties = p.filter_params(f=f, instrument='FORS2')
        filters[f] = filter_properties
        lambda_eff = filter_properties['lambda_eff']
        if lambda_eff < bluest_lambda:
            bluest_lambda = lambda_eff
            bluest = f

    # Insert random sources in the bluest filter.

    f_0 = bluest[0]
    output_properties = p.object_output_params(obj)
    fwhm = output_properties[f_0 + '_fwhm_pix']
    zeropoint, _, airmass, _ = ph.select_zeropoint(obj,
                                                   bluest,
                                                   instrument='fors2')

    base_path = paths[f_0 + '_subtraction_image']

    output_path = test_path + f_0 + '_random_sources.fits'
    _, sources = ph.insert_random_point_sources_to_file(file=base_path,
                                                        fwhm=fwhm,
                                                        output=output_path,
                                                        n=n,
                                                        airmass=airmass,
                                                        zeropoint=zeropoint)

    p.add_output_path(obj=obj,
                      key=f_0 + '_subtraction_image_synth_random',
                      path=output_path)

    # Now insert sources at the same positions in other filters, but with magnitudes randomised.
    for f in filters:
        if f != bluest:
            f_0 = f[0]
            output_properties = p.object_output_params(obj)
            fwhm = output_properties[f_0 + '_fwhm_pix']
            zeropoint, _, airmass, _ = ph.select_zeropoint(obj,
                                                           f,
                                                           instrument='fors2')

            base_path = paths[f_0 + '_subtraction_image']

            mag = np.random.uniform(mag_lower, mag_upper, size=n)

            output_path = test_path + f_0 + '_random_sources.fits'
            ph.insert_point_sources_to_file(file=base_path,
                                            fwhm=fwhm,
                                            output=output_path,
                                            x=sources['x_0'],
                                            y=sources['y_0'],
                                            mag=mag,
                                            airmass=airmass,
                                            zeropoint=zeropoint)

            p.add_output_path(obj=obj,
                              key=f_0 + '_subtraction_image_synth_random',
                              path=output_path)
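
The synthetic-source calls above convert a requested magnitude into image counts through a zeropoint and an airmass. A sketch of the conventional relation they imply; the extinction coefficient k and the exposure time are assumptions here, and the pipeline's own ph.insert_point_sources_to_file may handle them differently.

def mag_to_counts(mag, zeropoint, airmass=1.0, k=0.0, exp_time=1.0):
    """Convert an apparent magnitude to expected detector counts.

    Assumes the calibration mag = zeropoint - 2.5 * log10(counts / exp_time) - k * airmass.
    """
    instrumental_mag = mag - zeropoint + k * airmass
    counts_per_second = 10.0 ** (-0.4 * instrumental_mag)
    return counts_per_second * exp_time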