Example No. 1
def runpipe5(cluster):
    # This portion of the pipeline builds the combined (cropped) images and masks for each observation
    combined_dir = cluster.combined_directory

    io.make_directory(combined_dir)

    while not io.file_exists(cluster.master_crop_file):
        print("Master crop file not found")
        run_ds9_for_master_crop(cluster)

    for observation in cluster.observations:

        # clean data
        infile = "{}[sky=region({})]".format(observation.clean,
                                             cluster.master_crop_file)
        outfile = cluster.temp_acisI_comb
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        infile = "{}[bin sky=4][energy=700:8000]".format(
            cluster.temp_acisI_comb)
        outfile = observation.acisI_comb_img
        clobber = True

        print("ObsID: {}\t- Extracting just 0.7keV - 8keV.".format(
            observation.id))
        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        # background
        infile = "{}[sky=region({})]".format(observation.back,
                                             cluster.master_crop_file)
        outfile = cluster.temp_backI_comb
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        infile = "{}[bin sky=4][energy=700:8000]".format(
            cluster.temp_backI_comb)
        outfile = observation.backI_comb_img
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        io.delete(cluster.temp_acisI_comb)
        io.delete(cluster.temp_backI_comb)

        make_mask_file(observation)
        make_cumulative_mask_file(cluster, observation)

    create_combined_images(cluster)
    make_nosrc_cropped_xray_sb(cluster)
Example No. 2
def get_cluster(config_file_arg):
    if io.file_exists(config_file_arg):
        cluster_obj = cluster.read_cluster_data(config_file_arg)
    else:
        config_file = cluster.get_cluster_config(config_file_arg)
        if config_file:
            cluster_obj = cluster.read_cluster_data(config_file)
        else:
            print("Error finding cluster configuration file. Try passing the full path to the file.")
            return
    return cluster_obj
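
A minimal usage sketch for get_cluster; the path below is a placeholder, not a file from the source, and the pipeline's own io and cluster modules are assumed to be importable.

# Hypothetical call; get_cluster returns None if the configuration cannot be located.
clstr = get_cluster("/path/to/cluster_config_file")
if clstr is not None:
    print("Loaded cluster: {}".format(clstr.name))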
Example No. 3
def runpipe5(cluster):
    print("runpipe5")
    from astropy.io import fits
    # This portion of the pipeline builds the combined (cropped) images and masks for each observation
    combined_dir = cluster.combined_directory

    io.make_directory(combined_dir)

    while not io.file_exists(cluster.master_crop_file):
        print("Master crop file not found")
        run_ds9_for_master_crop(cluster)

    # the contents of this for loop should be refactored/replaced with the make_acisI_and_back function (a sketch follows this example)
    for observation in cluster.observations:
        infile = "{}[sky=region({})]".format(observation.clean, cluster.master_crop_file)
        outfile = cluster.temp_acisI_comb
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        print("{} shape: {}".format(outfile, fits.open(outfile)[0].shape))

        infile = "{}[bin sky=4][energy=700:8000]".format(cluster.temp_acisI_comb)
        outfile = observation.acisI_comb_img
        clobber = True

        print("ObsID: {}\t- Extracting just 0.7keV - 8keV.".format(observation.id))
        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        # background
        infile = "{}[sky=region({})]".format(observation.back, cluster.master_crop_file)
        outfile = cluster.temp_backI_comb
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        infile = "{}[bin sky=4][energy=700:8000]".format(cluster.temp_backI_comb)
        outfile = observation.backI_comb_img
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        io.delete(cluster.temp_acisI_comb)
        io.delete(cluster.temp_backI_comb)

        make_mask_file(observation)
        make_cumulative_mask_file(cluster, observation)

    create_combined_images(cluster)
    make_nosrc_cropped_xray_sb(cluster)
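
The loop above could be collapsed into the make_acisI_and_back helper that the comment mentions. The sketch below is reconstructed from the loop body under that assumption; it is not the pipeline's actual implementation, and it reuses the rt and io modules already imported by these examples.

def make_acisI_and_back(observation, cluster):
    """Sketch of the refactor suggested above: crop, bin, and energy-filter
    one observation's clean and background event files (reconstructed from
    the loop body, not the pipeline's own code)."""
    for evt_file, temp_file, out_img in [
            (observation.clean, cluster.temp_acisI_comb, observation.acisI_comb_img),
            (observation.back, cluster.temp_backI_comb, observation.backI_comb_img)]:
        # Crop the event file to the master crop region.
        rt.dmcopy.punlearn()
        rt.dmcopy(infile="{}[sky=region({})]".format(evt_file, cluster.master_crop_file),
                  outfile=temp_file, clobber=True)

        # Bin by 4 sky pixels and keep only 0.7-8 keV events.
        rt.dmcopy.punlearn()
        rt.dmcopy(infile="{}[bin sky=4][energy=700:8000]".format(temp_file),
                  outfile=out_img, clobber=True)

        # Remove the intermediate cropped event file.
        io.delete(temp_file)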
Example No. 4
def process_commandline_arguments(cluster_obj):
    print("Processing commandline arguments")
    args = get_arguments()
    cluster_obj = None
    if io.file_exists(args.batch_file):
        ciao.automated_cluster_init(args.batch_file)
    if None not in [args.name, args.abundance, args.nH, args.redshift, args.obsids]:
        ciao.initialize_cluster(name=args.name, obsids=args.obsids, abundance=args.abundance,
                                redshift=args.redshift, nH=args.nH)
    if args.init_cluster:
        cluster_obj.initialize_cluster()
    if args.config_file != "":
        if io.file_exists(args.config_file):
            cluster_obj = cluster.read_cluster_data(args.config_file)
        else:
            config_file = cluster.get_cluster_config(args.config_file)
            if config_file:
                cluster_obj = cluster.read_cluster_data(config_file)
            else:
                print("Error finding cluster configuration file. Try passing the full path to the file.")
                return
    if args.cont:
        if cluster_obj is None:
            cluster_obj = config.current_cluster()
            if cluster_obj is None:
                print("Cannot find a current working cluster.")
                exit(-1)
        print("Continuing {}.".format(cluster_obj.name))

        ciao.start_from_last(cluster_obj)
    if args.download_data:
        if 0 == cluster_obj.last_step_completed:
            cluster_obj.get_cluster_info_from_user()
        ciao.download_data(cluster_obj)
    if args.remove_sources:
        ciao.remove_sources(cluster_obj)
    return cluster_obj
Example No. 5
def make_response_files(cluster):
    for obsid in cluster.observation_ids:
        print("Making response files for observation {}".format(obsid))
        obs_analysis_dir = cluster.obs_analysis_directory(obsid)
        region_file = io.get_path("{}/acisI_region_0.reg".format(obs_analysis_dir))

        if (not io.file_exists(region_file)) or (io.file_size(region_file) == 0):
            print("Region file {} does not exist.".format(region_file))
            print("When DS9 opens, draw a small circle that covers a piece of each ACIS-I chip (~20 pixels) and save it as:\n" \
                  "{}".format(region_file))
            print("Opening SAO DS9")
            io.write_contents_to_file("", region_file, False)
            ds9_arguments = "ds9 -regions system physical -regions shape circle -regions format ciao -zoom 0.5 " \
                            "-bin factor 4 {}/acisI_clean.fits".format(obs_analysis_dir)
            subprocess.run([ds9_arguments], shell=True)
        print('Creating global response file.')
        create_global_response_file_for(cluster, obsid, region_file)
Example No. 6
def process_commandline_arguments(cluster_obj):
    print("Processing commandline arguments")
    args = get_arguments()
    cluster_obj = None
    if None not in [
            args.name, args.abundance, args.nH, args.redshift, args.obsids
    ]:
        ciao.initialize_cluster(name=args.name,
                                obsids=args.obsids,
                                abundance=args.abundance,
                                redshift=args.redshift,
                                nH=args.nH)
    if args.init_cluster:
        cluster_obj.initialize_cluster()
    if args.config_file != "":
        if io.file_exists(args.config_file):
            cluster_obj = cluster.read_cluster_data(args.config_file)
        else:
            config_file = cluster.get_cluster_config(args.config_file)
            if config_file:
                cluster_obj = cluster.read_cluster_data(config_file)
            else:
                print(
                    "Error finding cluster configuration file. Try passing the full path to the file."
                )
                return
        if args.find_sources:
            ciao.find_sources(cluster_obj, ecf=args.ecf, energy=args.energy)

    if args.cont:
        if cluster_obj is None:
            cluster_obj = config.current_cluster()
            if cluster_obj is None:
                print("Cannot find a current working cluster.")
                exit(-1)

        if args.parallel and cluster_obj.last_step_completed == '1':
            ciao.run_stage_2_parallel(cluster_obj, args)
        else:
            ciao.start_from_last(cluster_obj, args)

    return cluster_obj
Example No. 7
def process_commandline_arguments(cluster_obj):
    print("Processing commandline arguments")
    args = get_arguments()
    if io.file_exists(args.batch_file):
        ciao.automated_cluster_init(args.batch_file)
    if None not in [args.name, args.abundance, args.nH, args.redshift, args.obsids]:
        ciao.initialize_cluster(name=args.name, obsids=args.obsids, abundance=args.abundance,
                                redshift=args.redshift, nH=args.nH)
    if args.init_cluster:
        cluster_obj.initialize_cluster()
    if args.config_file != "":
        cluster_obj = get_cluster(args.config_file)
    if args.cont:
        ciao.start_from_last(cluster_obj)
    if args.download_data:
        if 0 == cluster_obj.last_step_completed:
            cluster_obj.get_cluster_info_from_user()
        ciao.download_data(cluster_obj)
    if args.remove_sources:
        ciao.remove_sources(cluster_obj)
    return cluster_obj
Example No. 8
def make_cumulative_mask_file(cluster, observation):
    from astropy.io import fits
    cumulative_mask_filename = cluster.combined_mask

    current_obs_mask_filename = observation.acisI_combined_mask

    if not io.file_exists(cumulative_mask_filename):
        print("Cumulative mask file not found. Creating it.")
        cumulative_mask = fits.open(current_obs_mask_filename)
        cumulative_mask.writeto(cumulative_mask_filename)
    else:
        current_mask = fits.open(current_obs_mask_filename)
        cumulative_mask = fits.open(cumulative_mask_filename)

        current_mask[0].data = current_mask[0].data + cumulative_mask[0].data
        # From the original IDL:
        #   newcumulativemask = currentmask + cumulativemask
        #   newcumulativemask[where(newcumulativemask gt 1)] = 1
        # i.e. clamp every summed value greater than 1 back to 1 (a logical OR of the binary masks).
        current_mask[0].data[np.where(current_mask[0].data > 1)] = 1
        current_mask.writeto(cumulative_mask_filename, overwrite=True)
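
The clamp in make_cumulative_mask_file is just a logical OR of two binary masks. A standalone NumPy sketch of that single step follows; the array values are illustrative, whereas in the pipeline they come from the FITS data arrays.

import numpy as np

# Two illustrative binary masks.
current = np.array([[0, 1], [1, 0]])
cumulative = np.array([[1, 1], [0, 0]])

# Add the masks, then clamp every value above 1 back to 1 --
# equivalent to np.logical_or(current, cumulative).astype(int).
combined = current + cumulative
combined[np.where(combined > 1)] = 1
print(combined)  # [[1 1]
                 #  [1 0]]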
Example No. 9
def make_cumulative_mask_file(cluster, observation):
    from astropy.io import fits
    cumulative_mask_filename = cluster.combined_mask

    current_obs_mask_filename = observation.acisI_combined_mask

    if not io.file_exists(cumulative_mask_filename):
        print("Cumulative mask file not found. Creating it.")
        cumulative_mask = fits.open(current_obs_mask_filename)
        cumulative_mask.writeto(cumulative_mask_filename)
    else:
        current_mask = fits.open(current_obs_mask_filename)
        cumulative_mask = fits.open(cumulative_mask_filename)

        print("Cumulative mask {} shape:{}".format(cumulative_mask_filename,
                                                   cumulative_mask[0].shape))
        print("current mask {} shape:{}".format(current_obs_mask_filename,
                                                   current_mask[0].shape))

        current_mask[0].data = current_mask[0].data + cumulative_mask[0].data

        current_mask[0].data[np.where(current_mask[0].data > 1)] = 1
        current_mask.writeto(cumulative_mask_filename, overwrite=True)
Example No. 10
def prepare_efftime_circle(cluster):
    try:
        from ciao_contrib import runtool as rt
    except ImportError:
        print("Failed to import CIAO python scripts. ")
        raise

    for observation in cluster.observations:
        io.delete_if_exists(observation.effbtime)
        io.delete_if_exists(observation.effdtime)

        if not io.file_exists(observation.acisI_nosrc_combined_mask_file):
            print("Removing point sources from the observations combined mask file.")
            print("dmcopy infile='{}[exclude sky=region({})]' outfile={} clobber=True".format(
                observation.acisI_combined_mask_file,
                cluster.sources_file,
                observation.acisI_nosrc_combined_mask_file
            ))
            rt.dmcopy.punlearn()
            rt.dmcopy(
                infile="{fits_file}[exclude sky=region({source_file})]".format(
                    fits_file=observation.acisI_combined_mask_file,
                    source_file=cluster.sources_file
                ),
                outfile=observation.acisI_nosrc_combined_mask_file,
                clobber=True
            )
        else:
            print("{acis} already exists.".format(
                acis=observation.acisI_nosrc_combined_mask_file
            ))

        if not io.file_exists(observation.acisI_high_energy_combined_image_file):
            print("Creating high band (9.5-12 keV) source image cropped to combined region.")
            rt.dmcopy.punlearn()
            rt.dmcopy(
                infile="{fits_file}[sky=region({crop_file})]".format(
                    fits_file=observation.clean,
                    crop_file=cluster.master_crop_file
                ),
                outfile=observation.acisI_high_energy_temp_image,
                clobber=True
            )

            rt.dmcopy.punlearn()
            rt.dmcopy(
                infile="{fits_file}[EVENTS][bin sky=4][energy=9500:12000]".format(
                    fits_file=observation.acisI_high_energy_temp_image
                ),
                outfile=observation.acisI_high_energy_combined_image_file,
                option="image",
                clobber=True
            )
        else:
            print("{fits_file} already exists.".format(
                fits_file=observation.acisI_high_energy_combined_image_file
            ))

        io.delete_if_exists(observation.acisI_high_energy_temp_image)

        if not io.file_exists(observation.backI_high_energy_combined_image_file):
            print("Creating high band (9.5-12 keV) background image cropped to combined region.")
            rt.dmcopy.punlearn()
            rt.dmcopy(
                infile="{fits_file}[sky=region({crop_file})]".format(
                    fits_file=observation.back,
                    crop_file=cluster.master_crop_file
                ),
                outfile=observation.backI_high_energy_temp_image,
                clobber=True
            )

            rt.dmcopy.punlearn()
            rt.dmcopy(
                infile="{fits_file}[EVENTS][bin sky=4][energy=9500:12000]".format(
                    fits_file=observation.backI_high_energy_temp_image
                ),
                outfile=observation.backI_high_energy_combined_image_file,
                option="image",
                clobber=True
            )
        else:
            print("{fits_file} already exists.".format(
                fits_file=observation.backI_high_energy_combined_image_file
            ))

        io.delete_if_exists(observation.backI_high_energy_temp_image)
Example No. 11
def prepare_effective_time_circles_for(observation: cluster.Observation):
    io.delete_if_exists(observation.effbtime)
    io.delete_if_exists(observation.effdtime)

    if not io.file_exists(observation.acisI_nosrc_combined_mask_file):
        print("Removing point sources from the observations combined mask file.")
        print("dmcopy infile='{}[exclude sky=region({})]' outfile={} clobber=True".format(
            observation.acisI_combined_mask_file,
            observation.cluster.sources_file,
            observation.acisI_nosrc_combined_mask_file
        ))
        rt.dmcopy.punlearn()
        rt.dmcopy(
            infile="{fits_file}[exclude sky=region({source_file})]".format(
                fits_file=observation.acisI_combined_mask_file,
                source_file=observation.cluster.sources_file
            ),
            outfile=observation.acisI_nosrc_combined_mask_file,
            clobber=True
        )
    else:
        print("{acis} already exists.".format(
            acis=observation.acisI_nosrc_combined_mask_file
        ))

    # if not io.file_exists(observation.acisI_high_energy_combined_image_file):
    print("Creating high band (9.5-12 keV) source image cropped to combined region.")
    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{fits_file}[sky=region({crop_file})]".format(
            fits_file=observation.clean,
            crop_file=observation.cluster.master_crop_file
        ),
        outfile=observation.acisI_high_energy_temp_image,
        clobber=True
    )

    # TODO: change to observation-specific output files

    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{fits_file}[EVENTS][bin sky=4][energy=9500:12000]".format(
            fits_file=observation.acisI_high_energy_temp_image
        ),
        outfile=observation.acisI_high_energy_combined_image_file,
        option="image",
        clobber=True
    )

    io.delete_if_exists(observation.acisI_high_energy_temp_image)

    print("Creating high band (9.5-12 keV) background image cropped to combined region.")
    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{fits_file}[sky=region({crop_file})]".format(
            fits_file=observation.back,
            crop_file=observation.cluster.master_crop_file
        ),
        outfile=observation.backI_high_energy_temp_image,
        clobber=True
    )

    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{fits_file}[EVENTS][bin sky=4][energy=9500:12000]".format(
            fits_file=observation.backI_high_energy_temp_image
        ),
        outfile=observation.backI_high_energy_combined_image_file,
        option="image",
        clobber=True
    )

    io.delete_if_exists(observation.backI_high_energy_temp_image)
Example No. 12
def lightcurves_with_exclusion(cluster):
    for observation in cluster.observations:


        # data_nosrc_hiEfilter = "{}/acisI_nosrc_fullE.fits".format(obs_analysis_dir)

        data_nosrc_hiEfilter = "{}/acisI_nosrc_hiEfilter.fits".format(observation.analysis_directory)

        print("Creating the image with sources removed")

        data = observation.acis_nosrc_filename

        image_nosrc = "{}/img_acisI_nosrc_fullE.fits".format(observation.analysis_directory)

        if io.file_exists(observation.exclude_file):
            print("Removing sources from event file to be used in lightcurve")

            infile = "{}[exclude sky=region({})]".format(data_nosrc_hiEfilter, observation.exclude)
            outfile = "{}/acisI_lcurve.fits".format(observation.analysis_directory)
            clobber = True

            rt.dmcopy.punlearn()
            rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

            data_lcurve = "{}/acisI_lcurve.fits".format(observation.analysis_directory)
        else:
            yes_or_no = io.check_yes_no(
                "Are there sources to be excluded from observation {} while making the lightcurve? ".format(observation.id))

            if yes_or_no:  # yes_or_no == True
                print("Create a region file with the region to be excluded and save it as {}".format(observation.exclude_file))
                # Fall back to the unfiltered event list so the lightcurve step below still runs;
                # re-run this stage once the exclusion region file exists.
                data_lcurve = data_nosrc_hiEfilter
            else:
                data_lcurve = data_nosrc_hiEfilter

        backbin = 259.28

        echo = True
        tstart = rt.dmkeypar(infile=data_nosrc_hiEfilter, keyword="TSTART", echo=echo)
        tstop = rt.dmkeypar(infile=data_nosrc_hiEfilter, keyword="TSTOP", echo=echo)

        print("Creating lightcurve from the events list with dmextract")

        infile = "{}[bin time={}:{}:{}]".format(data_lcurve, tstart, tstop, backbin)
        outfile = "{}/acisI_lcurve.lc".format(observation.analysis_directory)
        opt = "ltc1"

        rt.dmextract.punlearn()
        rt.dmextract(infile=infile, outfile=outfile, opt=opt, clobber=clobber)

        lcurve = outfile

        print("Cleaning the lightcurve by removing flares with deflare. Press enter to continue.")

        rt.deflare.punlearn()
        infile = lcurve
        outfile = "{}/acisI_gti.gti".format(observation.analysis_directory)
        method = "clean"
        save = "{}/acisI_lcurve".format(observation.analysis_directory)

        rt.deflare(infile=infile, outfile=outfile, method=method, save=save)

        gti = outfile

        print("filtering the event list using GTI info just obtained.")

        infile = "{}[@{}]".format(data_nosrc_hiEfilter, gti)
        outfile = observation.clean
        clobber = True

        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        data_clean = outfile

        print("Don't forget to check the light curves!")
Example No. 13
def ciao_back(cluster, overwrite=False):
    print("Running ciao_back on {}.".format(cluster.name))

    for observation in cluster.observations:
        pcad_file = make_pcad_lis(cluster, observation.id)
        backI_lis = []
        backS_lis = []
        analysis_path = observation.analysis_directory
        filelist = io.read_contents_of_file(observation.ccd_merge_list).split('\n')
        pcad = io.read_contents_of_file(pcad_file)
        for acis_file in filelist:
            rt.acis_bkgrnd_lookup.punlearn()
            print("Finding background for {}".format(acis_file))
            path_to_background = rt.acis_bkgrnd_lookup(infile=acis_file)
            print("Found background at {}".format(path_to_background))
            acis_id = int(acis_file.split('/')[-1].split('.')[-2][-1])
            assert isinstance(acis_id, int), "acis_id = {}".format(acis_id)
            assert path_to_background is not None, "Cannot find background {}".format(acis_file)

            local_background_path = io.get_path("{}/back_ccd{}.fits".format(analysis_path, acis_id))
            try:
                if io.file_exists(local_background_path) and overwrite:
                    io.delete(local_background_path)
                io.copy(path_to_background, local_background_path)
            except OSError:
                print("Problem copying background file {}. Do you have the right permissions and a full CALDB?".format(
                    path_to_background))

                raise

            acis_gain = rt.dmkeypar(infile=acis_file,
                                    keyword="GAINFILE",
                                    echo=True)
            background_gain = rt.dmkeypar(infile=local_background_path,
                                          keyword="GAINFILE",
                                          echo=True)

            print("{}/{}/acis_ccd{}.fits gain: {}".format(cluster.name, observation.id, acis_id, acis_gain))
            print("{}/{}/back_ccd{}.fits gain: {}".format(cluster.name, observation.id, acis_id, background_gain))

            if not dates_and_versions_match(acis_gain, background_gain):
                print("Date/version numbers don't match on the ACIS data and background. Reprocessing.")

                local_background_path = reprocess(cluster, observation.id, acis_gain, background_gain, acis_id)

            print("Reprojecting background")
            rt.reproject_events.punlearn()
            infile = local_background_path
            outfile = io.get_path("{local_path}/back_reproj_ccd{acis_id}.fits".format(local_path=analysis_path,
                                                                                      acis_id=acis_id))
            match = acis_file

            print(
                "Running:\n reproject_events(infile={infile}, outfile={outfile}, aspect={pcad}, match={match})".format(
                    infile=infile, outfile=outfile, pcad=pcad, match=match)
            )
            rt.reproject_events(infile=infile,
                                outfile=outfile,
                                aspect="{pcad_file}".format(pcad_file=pcad),
                                match=match,
                                random=0,
                                clobber=True)

            back_reproject = outfile
            datamode = rt.dmkeypar(infile=io.get_filename_matching(io.get_path("{}/acis*evt1*.fits".format(analysis_path))),
                                   keyword="DATAMODE")
            if datamode == "VFAINT":
                print("VFAINT Mode, resetting setting status bits")
                rt.dmcopy.punlearn()
                rt.dmcopy(infile="{}[status=0]".format(back_reproject),
                          outfile=outfile,
                          clobber=True)
            if acis_id <= 3:
                backI_lis.append(back_reproject)
            else:
                backS_lis.append(back_reproject)

        merged_back_list = backI_lis + backS_lis

        print("writing backI.lis and backS.lis")
        io.write_contents_to_file("\n".join(backI_lis), io.get_path("{}/backI.lis".format(analysis_path)),
                                  binary=False)
        io.write_contents_to_file("\n".join(backS_lis), io.get_path("{}/backS.lis".format(analysis_path)),
                                  binary=False)
        io.write_contents_to_file("\n".join(merged_back_list), observation.merged_back_lis, binary=False)

    return
Example No. 14
    T = clstr.temperature_map
    header = clstr.temperature_map_header

    n, T = do.make_sizes_match(n, T)

    P = n*T

    norm_P = do.normalize_data(P)

    fits.writeto(clstr.pressure_map_filename, norm_P, header=header, overwrite=True)


if __name__ == '__main__':
    args, parser = get_arguments()
    if args.cluster_config is not None:
        if io.file_exists(args.cluster_config):
            clstr = cluster.read_cluster_data(args.cluster_config)
        else:
            clstr_config = cluster.get_cluster_config(args.cluster_config)
            if io.file_exists(clstr_config):
                clstr = cluster.read_cluster_data(clstr_config)
            else:
                print("Error finding cluster configuration file. Try passing the full path and filename.")
                exit(1)

        if args.commands:
            make_commands_lis(clstr, args.resolution)
        if args.eff_times_fits:
            eff_times_to_fits(clstr)
        elif args.temperature_map:
            print("Creating temperature map.")