Example #1
def make_acisI_and_back_for(observation, cluster):
    # crop the cleaned event file to the master crop region
    rt.dmcopy.punlearn()
    rt.dmcopy(infile="{clean_file}[sky=region({mask})]".format(
        clean_file=observation.clean, mask=cluster.master_crop_file),
              outfile=cluster.temp_acisI_comb,
              clobber=True)

    # bin by 4 in sky coordinates and keep only 0.7-8 keV events
    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{temp_acisI_combined}[bin sky=4][energy=700:8000]".format(
            temp_acisI_combined=cluster.temp_acisI_comb),
        outfile=observation.acisI_comb_img,
        clobber=True)

    # repeat both steps for the background event file
    rt.dmcopy.punlearn()
    rt.dmcopy(infile="{back_file}[sky=region({mask})]".format(
        back_file=observation.back, mask=cluster.master_crop_file),
              outfile=cluster.temp_backI_comb,
              clobber=True)

    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{temp_backI_combined}[bin sky=4][energy=700:8000]".format(
            temp_backI_combined=cluster.temp_backI_comb),
        outfile=observation.backI_comb_img,
        clobber=True)

    # remove the temporary cropped (unbinned) event files
    io.delete(cluster.temp_acisI_comb)
    io.delete(cluster.temp_backI_comb)
Example #2
def make_mask_file(observation):
    from astropy.io import fits
    import numpy as np
    print("Creating an image mask for {}.".format(observation.id))

    original_fits_filename = observation.acisI_comb_img

    # build a unit mask with the same shape and header as the combined image
    mask = fits.open(original_fits_filename)
    mask[0].data = np.ones_like(mask[0].data)

    mask_filename = observation.temp_acis_comb_mask_filename

    mask.writeto(mask_filename, overwrite=True)

    rt.dmcopy.punlearn()
    # infile = "{mask_filename}[sky=region({fov_file})][opt full]".format( # for ACIS-I & ACIS-S
    infile = "{mask_filename}[sky=region({fov_file}[ccd_id=0:3])][opt full]".format(  # for ACIS-I; see the sketch after this function
        mask_filename=mask_filename,
        fov_file=observation.fov_file)
    outfile = observation.acisI_combined_mask
    clobber = True

    rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

    print("Image mask created for {obsid} and saved as {filename}".format(
        obsid=observation.id, filename=outfile))

    io.delete(observation.temp_acis_comb_mask_filename)
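The two infile lines in make_mask_file above differ only in whether the FoV region is restricted to the ACIS-I chips (ccd_id=0:3). A minimal sketch of making that choice explicit, assuming a hypothetical include_acis_s flag that is not part of the original code:

def build_mask_infile(mask_filename, fov_file, include_acis_s=False):
    # Hypothetical helper: keep the full FoV region when ACIS-S chips are wanted,
    # otherwise restrict the region file to the ACIS-I chips (ccd_id 0-3).
    fov_filter = fov_file if include_acis_s else "{}[ccd_id=0:3]".format(fov_file)
    return "{mask}[sky=region({fov})][opt full]".format(mask=mask_filename, fov=fov_filter)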
Example #3
def runpipe5(cluster):
    # This portion of the pypeline crops each observation to the master region and builds the combined images
    combined_dir = cluster.combined_directory

    io.make_directory(combined_dir)

    while not io.file_exists(cluster.master_crop_file):
        print("Master crop file not found")
        run_ds9_for_master_crop(cluster)

    for observation in cluster.observations:

        # clean data
        infile = "{}[sky=region({})]".format(observation.clean,
                                             cluster.master_crop_file)
        outfile = cluster.temp_acisI_comb
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        infile = "{}[bin sky=4][energy=700:8000]".format(
            cluster.temp_acisI_comb)
        outfile = observation.acisI_comb_img
        clobber = True

        print("ObsID: {}\t- Extracting just 0.7keV - 8keV.".format(
            observation.id))
        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        # background
        infile = "{}[sky=region({})]".format(observation.back,
                                             cluster.master_crop_file)
        outfile = cluster.temp_backI_comb
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        infile = "{}[bin sky=4][energy=700:8000]".format(
            cluster.temp_backI_comb)
        outfile = observation.backI_comb_img
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        io.delete(cluster.temp_acisI_comb)
        io.delete(cluster.temp_backI_comb)

        make_mask_file(observation)
        make_cumulative_mask_file(cluster, observation)

    create_combined_images(cluster)
    make_nosrc_cropped_xray_sb(cluster)
Example #4
def runpipe5(cluster):
    print("runpipe5")
    from astropy.io import fits
    # This portion of the pypeline crops each observation to the master region and builds the combined images
    combined_dir = cluster.combined_directory

    io.make_directory(combined_dir)

    while not io.file_exists(cluster.master_crop_file):
        print("Master crop file not found")
        run_ds9_for_master_crop(cluster)

    # the contents of this for loop should be refactored/replaced with the make_acisI_and_back_for function (see the sketch after this example)
    for observation in cluster.observations:
        infile = "{}[sky=region({})]".format(observation.clean, cluster.master_crop_file)
        outfile = cluster.temp_acisI_comb
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        print("{} shape: {}".format(outfile, fits.open(outfile)[0].shape))

        infile = "{}[bin sky=4][energy=700:8000]".format(cluster.temp_acisI_comb)
        outfile = observation.acisI_comb_img
        clobber = True

        print("ObsID: {}\t- Extracting just 0.7keV - 8keV.".format(observation.id))
        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        # background
        infile = "{}[sky=region({})]".format(observation.back, cluster.master_crop_file)
        outfile = cluster.temp_backI_comb
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        infile = "{}[bin sky=4][energy=700:8000]".format(cluster.temp_backI_comb)
        outfile = observation.backI_comb_img
        clobber = True

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        io.delete(cluster.temp_acisI_comb)
        io.delete(cluster.temp_backI_comb)

        make_mask_file(observation)
        make_cumulative_mask_file(cluster, observation)

    create_combined_images(cluster)
    make_nosrc_cropped_xray_sb(cluster)
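A minimal sketch of the refactor suggested by the comment at the top of the loop, assuming make_acisI_and_back_for(observation, cluster) behaves as in Example #1; this is a sketch, not the original pypeline code:

def runpipe5_refactored(cluster):
    # Sketch only: same flow as runpipe5, with the per-observation dmcopy calls
    # replaced by a single call to make_acisI_and_back_for (Example #1).
    io.make_directory(cluster.combined_directory)

    while not io.file_exists(cluster.master_crop_file):
        print("Master crop file not found")
        run_ds9_for_master_crop(cluster)

    for observation in cluster.observations:
        make_acisI_and_back_for(observation, cluster)
        make_mask_file(observation)
        make_cumulative_mask_file(cluster, observation)

    create_combined_images(cluster)
    make_nosrc_cropped_xray_sb(cluster)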
Example #5
def make_acisI_and_back_for(observation, cluster):
    from astropy.io import fits

    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{clean_file}[sky=region({mask})]".format(
            clean_file=observation.clean, mask=cluster.master_crop_file),
        outfile=cluster.temp_acisI_comb,
        clobber=True
    )

    shp = fits.open(cluster.temp_acisI_comb)[0].shape
    print(observation.clean)
    print("{} shape {}".format(cluster.temp_acisI_comb,
                               shp))

    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{temp_acisI_combined}[bin sky=4][energy=700:8000]".format(
            temp_acisI_combined=cluster.temp_acisI_comb),
        outfile=observation.acisI_comb_img,
        clobber=True
    )

    shp = fits.open(observation.acisI_comb_img)[0].shape

    print("{} shape {}".format(observation.acisI_comb_img,
                               shp))

    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{back_file}[sky=region({mask})]".format(
            back_file=observation.back,
            mask=cluster.master_crop_file),
        outfile=cluster.temp_backI_comb,
        clobber=True
    )

    rt.dmcopy.punlearn()
    rt.dmcopy(
        infile="{temp_backI_combined}[bin sky=4][energy=700:8000]".format(temp_backI_combined=cluster.temp_backI_comb),
        outfile=observation.backI_comb_img,
        clobber=True
    )

    io.delete(cluster.temp_acisI_comb)
    io.delete(cluster.temp_backI_comb)
Example #6
def ciao_back(cluster, overwrite=False):
    print("Running ciao_back on {}.".format(cluster.name))

    for observation in cluster.observations:
        pcad_file = make_pcad_lis(cluster, observation.id)
        backI_lis = []
        backS_lis = []
        analysis_path = observation.analysis_directory
        filelist = io.read_contents_of_file(observation.ccd_merge_list).splitlines()
        pcad = io.read_contents_of_file(pcad_file)
        for acis_file in filelist:
            rt.acis_bkgrnd_lookup.punlearn()
            print("Finding background for {}".format(acis_file))
            path_to_background = rt.acis_bkgrnd_lookup(infile=acis_file)
            print("Found background at {}".format(path_to_background))
            # e.g. '.../acis_ccd3.fits' -> ccd id 3
            acis_id = int(acis_file.split('/')[-1].split('.')[-2][-1])
            assert isinstance(acis_id, int), "acis_id = {}".format(acis_id)
            assert path_to_background is not None, "Cannot find background {}".format(acis_file)

            local_background_path = io.get_path("{}/back_ccd{}.fits".format(analysis_path, acis_id))
            try:
                if io.file_exists(local_background_path) and overwrite:
                    io.delete(local_background_path)
                io.copy(path_to_background, local_background_path)
            except OSError:
                print("Problem copying background file {}. Do you have the right permissions and a full CALDB?".format(
                    path_to_background))

                raise

            acis_gain = rt.dmkeypar(infile=acis_file,
                                    keyword="GAINFILE",
                                    echo=True)
            background_gain = rt.dmkeypar(infile=local_background_path,
                                          keyword="GAINFILE",
                                          echo=True)

            print("{}/{}/acis_ccd{}.fits gain: {}".format(cluster.name, observation.id, acis_id, acis_gain))
            print("{}/{}/back_ccd{}.fits gain: {}".format(cluster.name, observation.id, acis_id, background_gain))

            if not dates_and_versions_match(acis_gain, background_gain):
                print("Date/version numbers don't match on the acis data and background. Reprocessing.")

                local_background_path = reprocess(cluster, observation.id, acis_gain, background_gain, acis_id)

            print("Reprojecting background")
            rt.reproject_events.punlearn()
            infile = local_background_path
            outfile = io.get_path("{local_path}/back_reproj_ccd{acis_id}.fits".format(local_path=analysis_path,
                                                                                      acis_id=acis_id))
            match = acis_file

            print(
                "Running:\n reproject_events(infile={infile}, outfile={outfile}, aspect={pcad}, match={match})".format(
                    infile=infile, outfile=outfile, pcad=pcad, match=match)
            )
            rt.reproject_events(infile=infile,
                                outfile=outfile,
                                aspect="{pcad_file}".format(pcad_file=pcad),
                                match=match,
                                random=0,
                                clobber=True)

            back_reproject = outfile
            datamode = rt.dmkeypar(infile=io.get_filename_matching(io.get_path("{}/acis*evt1*.fits".format(analysis_path))),
                                   keyword="DATAMODE")
            if datamode == "VFAINT":
                print("VFAINT mode, filtering background events on status=0")
                # overwrite the reprojected background in place, keeping only status=0 events
                rt.dmcopy.punlearn()
                rt.dmcopy(infile="{}[status=0]".format(back_reproject),
                          outfile=outfile,
                          clobber=True)
            if acis_id <= 3:
                backI_lis.append(back_reproject)
            else:
                backS_lis.append(back_reproject)

        merged_back_list = backI_lis + backS_lis

        print("writing backI.lis and backS.lis")
        io.write_contents_to_file("\n".join(backI_lis), io.get_path("{}/backI.lis".format(analysis_path)),
                                  binary=False)
        io.write_contents_to_file("\n".join(backS_lis), io.get_path("{}/backS.lis".format(analysis_path)),
                                  binary=False)
        io.write_contents_to_file("\n".join(merged_back_list), observation.merged_back_lis, binary=False)

    return
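dates_and_versions_match and reprocess are not shown in these examples. A minimal sketch of what the comparison could look like, assuming GAINFILE values of the usual CALDB form acisD2000-08-12gain_ctiN0004.fits (date after acisD, version after the trailing N); the real helper may differ:

import re

def dates_and_versions_match(acis_gain, background_gain):
    # Sketch only: compare the date and version number embedded in the two
    # GAINFILE keyword values; assumes names like 'acisD2000-08-12gain_ctiN0004.fits'.
    pattern = r"acisD(?P<date>\d{4}-\d{2}-\d{2}).*N(?P<version>\d+)\.fits"
    acis = re.search(pattern, acis_gain)
    back = re.search(pattern, background_gain)
    if acis is None or back is None:
        return False
    return (acis.group("date") == back.group("date")
            and acis.group("version") == back.group("version"))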