Example #1
def subtract(args):
    for im_name in args.input1:
        if args.overwrite:
            new_name = im_name
        elif args.suffix:
            new_name = utilities.add_suffix_prefix(im_name, suffix=args.suffix)

        # Read image, separate data and header
        im = fits.open(im_name)
        data = im[0].data
        hdr = im[0].header

        # Extract the overscan region. Notice that what we call x,y are the second and first axes
        y0, y1, x0, x1 = args.region
        overscan = data.copy()[x0:x1, y0:y1]


        # Average over the short axis
        if overscan.shape[0] < overscan.shape[1]:
            average = numpy.nanmedian(overscan, axis=0)
            # Fit a polynomial and return the fitted values
            fitted_overscan = fit_pol(average, args.deg)
            data[:, y0:y1] -= fitted_overscan
        else:
            average = numpy.nanmedian(overscan, axis=1)
            # Fit a polynomial and return the fitted values
            fitted_overscan = fit_pol(average, args.deg)
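            # Transpose so the per-row fitted overscan broadcasts across the columns, then transpose back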
            data[x0:x1, :] = (data[x0:x1, :].T - fitted_overscan).T


        # Write to the output file
        hdr.add_comment("Overscan region subtracted. Region: [{0}:{1},{2}:{3}]".format(x0, x1, y0, y1))
        fits.writeto(new_name, data, hdr, clobber=True)

    return None
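fit_pol is not shown in this excerpt. Judging from the comments ("Fit a polynomial and return the fitted values"), a minimal sketch could look like the following, assuming args.deg is the polynomial degree (the project's actual helper may differ):

import numpy

def fit_pol(average, deg):
    # Fit a polynomial of degree `deg` to the 1-D averaged overscan and
    # return the fitted (smoothed) values evaluated at every pixel.
    x = numpy.arange(len(average))
    coefficients = numpy.polyfit(x, average, deg)
    return numpy.polyval(coefficients, x)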
Example #2
def trim(args):
    # Define region to be trimmed
    y0, y1, x0, x1 = args.region

    # args.output should be a list of output names. If not given, the outputs are the inputs
    # with whatever suffix, if any, the user gave
    if not args.output:
        args.output = [utilities.add_suffix_prefix(im_name, suffix=args.suffix) for im_name in args.input]

    # Do the actual trimming. We will do it first into a temporary file, then copy it into args.output. This is just
    # in case the output and input filenames are the same, or if the output exists. IRAF will not overwrite!
    for im_name, new_name in zip(args.input, args.output):
        basename = os.path.splitext(os.path.basename(im_name))[0]
        fd, temp_output = tempfile.mkstemp(prefix=basename, suffix=".fits")
        os.close(fd)  # close the descriptor; we only need the unique name
        os.unlink(temp_output)
        with utilities.tmp_mute():
            imcopy(im_name + "[{0}:{1}, {2}:{3}]".format(x0, x1, y0, y1), temp_output)
            shutil.move(temp_output, new_name)

            # If a mask exists, trim it identically
            if args.mask_key:
                maskname = fits.getheader(im_name)[args.mask_key]
                with fits.open(maskname, 'readonly') as mask_im:
                    mask_im[0].data = mask_im[0].data[y0:y1+1, x0:x1+1]
                    mask_output = utilities.replace_extension(new_name, ".fits.msk")
                    fits.writeto(mask_output, mask_im[0].data, mask_im[0].header, clobber=True)
                utilities.header_update_keyword(new_name, "MASK", os.path.abspath(mask_output),
                                                comment="Name of mask image. ")

    return args.output
Example #3
def remove_cosmics(args):

    for im_name in args.input:
        if args.output != '':
            newfile = args.output
        else:
            newfile = utils.add_suffix_prefix(im_name, prefix = args.prefix, \
                                            suffix = args.suffix )

        # Read the FITS :
        array, header = cosmics.fromfits(im_name)
        # Build the object :
        im = astroim.Astroim(im_name)

        # If the object is a bias or any kind of flat, do not remove cosmics
        if im.target.objtype in ["bias", "domeflat", "skyflat", "flat"]:
            continue

        gain = im.primary_header.get(im.primary_header.gaink)
        readnoise = im.primary_header.get(im.primary_header.ccdronk)
        c = cosmics.cosmicsimage(array, gain = float(gain), sigfrac = 0.3, \
                                 readnoise = float(readnoise), objlim = 7.0, \
                                 sigclip = float(args.sigclip))
        # Run the full artillery :
        c.run(maxiter = int(args.maxiter))

        # Write the cleaned image into a new FITS file, conserving the header:
        cosmics.tofits(newfile, c.cleanarray, header)

        # If you want the mask, here it is :
        if args.mask == True:
            mask = utils.add_suffix_prefix(newfile, suffix="-cosmic_mask")
            cosmics.tofits(mask, c.mask, header)

        # And write info to the header:
        im = fits.open(newfile, mode='update')
        hdr = im[0].header
        hdr.add_history("COSMIC RAYS REMOVED:")
        oldname = os.path.split(im_name)[1]
        newname = os.path.split(newfile)[1]
        hdr.add_history(oldname + " --> " + newname)
        hdr.add_history("Parameters used by cosmics.py. Gain=" + str(gain) + \
                        ", sigfrac=0.3, objlim=7.0, sigclip=" + args.sigclip + \
                        ", readnoise=" + str(readnoise))
        im.flush()
        im.close()
    return newfile
Example #4
def remove_cosmics(args):
    if args.output != '':
        newfile = args.output
    else:
        newfile = utils.add_suffix_prefix(args.input[0], prefix = args.prefix, \
                                        suffix = args.suffix )
    
    # Read the FITS :
    array, header = cosmics.fromfits(args.input[0])
    # Build the object :
    c = cosmics.cosmicsimage(array, gain = float(args.gain), sigfrac = 0.3, \
                             readnoise = float(args.readnoise), objlim = 5.0, \
                             sigclip = float(args.sigclip))
    # Run the full artillery :
    c.run(maxiter = int(args.maxiter))
    
    # Write the cleaned image into a new FITS file, conserving the header:
    cosmics.tofits(newfile, c.cleanarray, header)
    
    # If you want the mask, here it is :
    if args.mask == True:
        mask = utils.add_suffix_prefix(newfile, prefix="cosmic_mask")    
        cosmics.tofits(mask, c.mask, header)
                          
    # And write info to the header:
    im = fits.open(newfile, mode='update')
    hdr = im[0].header
    hdr.add_history("COSMIC RAYS REMOVED:")
    oldname = os.path.split(args.input[0])[1]
    newname = os.path.split(newfile)[1]
    hdr.add_history(oldname + " --> " + newname)
    hdr.add_history("Parameters used by cosmics.py. Gain=" + args.gain + \
                    ", sigfrac=0.3, objlim=5.0, sigclip=" + args.sigclip + \
                    ", readnoise=" + args.readnoise)   
    im.flush()
    im.close()                
    return newfile
Example #5
def match(args):
    """ Match the PSF of a group of images with the PSF of a reference image.
    """
    sort_by_seeing(args)

    # Get seeing from header and calculate psf of reference image, the first one since we sorted the input
    ref_seeing = utils.get_from_header(args.input[0], args.FWHM_key)
    ref_psf = psf.main(arguments=[args.input[0], "--stars", args.input_stars[0],
                                  "--sigma_key", args.sigma,
                                  "--gain_key",  args.gain_key,  
                                  "--ron_key",   args.ron_key,  
                                  "--expt_key",  args.expt_key, 
                                  "--airm_key",  args.airm_key, 
                                  "--FWHM_key", args.FWHM_key])
 
    output_list = []
    for image, stars in zip(args.input, args.input_stars):
        output = utils.add_suffix_prefix(image, suffix=args.suffix)

        # Differences in seeing that are too small are not worth matching
        current_seeing = utils.get_from_header(image, args.FWHM_key)
        if abs(ref_seeing - current_seeing) < args.limit:  # Not worth equating PSFs for small differences
            shutil.copy(image, output)  # copy old file into new one
        else:
            # Calculate psf of the other images
            psf_object =  psf.main(arguments=[image,
                                              "--stars",     stars,
                                              "--sigma_key", args.sigma,
                                              "--gain_key",  args.gain_key,
                                              "--ron_key",   args.ron_key,
                                              "--expt_key",  args.expt_key,
                                              "--airm_key",  args.airm_key,
                                              "--FWHM_key", args.FWHM_key])
            utils.if_exists_remove("kernel.fits", output)
            iraf.psfmatch(image, ref_psf, psf_object, "kernel.fits",
                                 convolution="psf", 
                                 filter="cosbell")                   
            iraf.psfmatch(image, ref_psf, psf_object,
                                 convolution="kernel", 
                                 kernel="kernel.fits", 
                                 output=output,
                                 verbose="no")
            utils.if_exists_remove("kernel.fits")
            mssg = "Before equating PSFs: " + str(current_seeing)
            utils.header_update_keyword(output, args.FWHM_key, ref_seeing, comment=mssg)
        output_list.append(output)
    return output_list
Example #6
def main(arguments=None):
    if arguments is None:
        arguments = sys.argv[1:]
    args = parser.parse_args(arguments)
    
    # Allow a suffix like "  -c" (with a leading space) so that a hyphen can be used in the suffix.
    if args.suffix != "":
        args.suffix = args.suffix[0].strip()
    
    # Detecting errors
    if args.suffix == '' and not args.overwrite:
        sys.exit("Error! Provide a suffix or activate the --overwrite option. "
                 "For help: python arith.py -h")
   
    for im in args.inputs:
        if args.suffix != "":
            output_file = utilities.add_suffix_prefix(im, suffix=args.suffix)
        elif args.overwrite:
            output_file = im
        build(im, output_file)
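The leading-space trick is needed because argparse rejects option values that begin with a hyphen (they look like other options). A minimal illustration, assuming a parser (not shown in the excerpt) where --suffix takes a single argument, as the args.suffix[0] indexing above suggests:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--suffix", nargs=1, default="")
parser.add_argument("inputs", nargs="+")

# parser.parse_args(["--suffix", "-c", "im.fits"]) fails because argparse
# treats "-c" as another option and complains that --suffix expected one
# argument. Prepending a space removes the ambiguity:
args = parser.parse_args(["--suffix", " -c", "im.fits"])
suffix = args.suffix[0].strip()   # -> "-c"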
Example #7
def filter_image(args):
    """ Routine that uses a median filter on a list of masked images. """
    output_list = []
    for image in args.input:
        # Read image, mask and header
        im = utils.read_image_with_mask(image, mask_keyword=args.mask_key)
        hdr = fits.getheader(image)
        
        # skimage uses masks where 1 means valid and 0 invalid
        mask = (im.mask + 1) % 2
        filt_im = skimages.filter.median_filter(im.data, mask=mask, radius=args.radius)
        
        # Make name of file if not given
        if args.output == "":
            output = utils.add_suffix_prefix(image, suffix='-mf')
        else :
            output = args.output
        
        # Add history line to the header and write to file
        hdr.add_history("- Image median filtered. Radius = " + str(args.radius))
        fits.writeto(output, filt_im, header=hdr)
        output_list.append(output)
    return output_list
Example #8
def main(Ha_name, rgunn_name, scaling_factor, zp_Ha, T_Ha, T_rGunn):
    # Tell the user to load regions for the galaxy
    galaxy_messg = "\n Create/load region(s) in ds9 to enclose the galaxy. "+\
              " Save it for latter use. Then hit the 'OK' button!"
    galaxy_region = mask_from_ds9(rgunn_name, galaxy_messg)

    # Sky regions
    sky_messg = "\n Now do the same for sky region (or regions!) "
    sky_region =   mask_from_ds9(rgunn_name, sky_messg)

    # Stars to model PSF
    mod_stars_catalogue = utils.replace_extension(rgunn_name, ".model_stars")
    mod_stars_messg = "\n Select nice isolated non-saturated stars to model the PSF. "
    mod_stars_region = positions_of_stars_from_ds9(rgunn_name, mod_stars_messg, mod_stars_catalogue)

    # Stars to be subtracted
    subt_stars_catalogue = utils.replace_extension(rgunn_name, ".subt_stars")
    subt_stars_messg = "\n Finally, select the stars to be subtracted from the images"
    subt_stars_region = positions_of_stars_from_ds9(rgunn_name, subt_stars_messg, subt_stars_catalogue)

    # Now remove the stars from the images
    output_Ha = utils.add_suffix_prefix(Ha_name, suffix='-s')
    extract_stars.main(arguments=['--model_stars', mod_stars_catalogue,
                                  "--subt_stars", subt_stars_catalogue,
                                  '--coords', 'world',
                                  '--output', output_Ha,
                                  Ha_name])
    output_rgunn = utils.add_suffix_prefix(rgunn_name, suffix='-s')
    extract_stars.main(arguments=['--model_stars', mod_stars_catalogue,
                                  "--subt_stars", subt_stars_catalogue,
                                  '--coords', 'world',
                                  '--output', output_rgunn,
                                  rgunn_name])

    # Read images
    image_Ha = fits.open(output_Ha)
    Ha_data = np.array(image_Ha[0].data, dtype=np.float64)
    Ha_header = image_Ha[0].header
    Ha_shape = Ha_data.shape

    image_R = fits.open(output_rgunn)
    R_data = np.array(image_R[0].data, dtype=np.float64)
    R_header  = image_R[0].header
    R_shape = R_data.shape

    # Halpha image: get galaxy mask, sky mask, subtract sky from galaxy flux
    Ha_gal_mask = galaxy_region.as_imagecoord(Ha_header).get_mask(shape=Ha_shape)
    Ha_sky_mask = sky_region.as_imagecoord(Ha_header).get_mask(shape=Ha_shape)
    Ha_sky_flux = np.median(Ha_data[Ha_sky_mask == 1])
    Ha_data_nosky = Ha_data - Ha_sky_flux
    Ha_and_R_counts = np.sum( Ha_data_nosky[Ha_gal_mask == 1])
    print "Narrow Ha filter counts: ", Ha_and_R_counts

    # Same for R image: get galaxy mask, sky mask, subtract sky from galaxy flux
    R_gal_mask = galaxy_region.as_imagecoord(R_header).get_mask(shape=R_shape)
    R_sky_mask = sky_region.as_imagecoord(R_header).get_mask(shape=R_shape)
    R_sky_flux = np.median(R_data[R_sky_mask == 1])
    R_data_nosky = R_data - R_sky_flux
    R_counts = np.sum( R_data_nosky[R_gal_mask == 1])
    print "R filter counts: ", R_counts


    # Halpha is basically the counts in the Halpha filter minus the scaled R counts,
    # but more precisely, the contribution of the Halpha line to the rGunn filter should be corrected for.
    # For:
    #   - a transmittance T(Gunn) of redshifted Halpha in the rGunn filter
    #   - T(Ha_filter) the transmittance of redshifted Halpha in the Halpha narrow filter
    #   - scaling_factor the factor of scale between counts in filter rGunn and in the Ha filter (typically from stars)
    #   - rgunn_scaled the number of counts in the Gunn filter, already scaled to match Halpha
    #   - Ha_filter is the counts in the narrow Ha filter, which obviously contain a contribution from R.
    #   - Halpha the number of counts of the Halpha line in the Halpha filter (the contribution of Halpha alone)
    #
    # we would need to subtract the Halpha contribution to rGunn before we scale it, but since we are starting
    # already from a scaled version of rGunn, we need to divide the contribution from Halpha by the scaling factor.
    # The system of equations would be:
    #
    #  R_counts = rgunn_scaled - Halpha * T(Gunn)/T(Ha_filter) / scaling_factor
    #  Halpha = Ha_filter - R_counts
    #
    # with solution:
    #  Halpha = (Ha_filter - rgunn_scaled) * scaling_factor / (scaling_factor - T(Gunn)/T(Ha_filter))
    #
    # which gives, for a typical scaling factor of ~11 and similar transmittances in both filters,
    # a correction of the order of ~10%.
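    # For example, with the typical scaling_factor ~ 11 and T(Gunn) ~ T(Ha_filter):
    #     scaling_factor / (scaling_factor - T(Gunn)/T(Ha_filter)) ~ 11 / (11 - 1) = 1.1
    # i.e. the Halpha counts come out ~10% higher than the naive subtraction Ha_filter - rgunn_scaled.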

    Halpha = (Ha_and_R_counts - R_counts * scaling_factor) / (1 - scaling_factor * T_rGunn / T_Ha)
    print "Halpha counts: ", Halpha
    print Halpha  / T_Ha * 10**(-zp_Ha/2.5)



    mask = np.logical_or(Ha_gal_mask, Ha_sky_mask)
    d = ds9.ds9()
    d.set_np2arr(Ha_data * mask, dtype=np.float64)
Example #9
                         str(DEC), "--radius", str(FoV), "--depth", "100,250", 
                         "--solved", "solved.txt", np.str(image)]) 


sys.exit()

print "Estimate seeing from images"
for index, image in enumerate(list_images["filename"]):
    if list_images["type"][index] in ("cig", "standards", "clusters"):
        # Victor Terron has promised that changing dirs will soon be unnecessary
        curdir = os.path.abspath(os.curdir)
        os.chdir(lemon_dir)
        import lemon.seeing as seeing
        seeing.main(arguments=["--margin", "0", "--filename", '', "--suffix",
                               "-s", image, os.path.split(image)[0] ])
        newname = utilities.add_suffix_prefix(image, suffix = "-s")

        os.chdir(curdir)
        list_images["filename"][index] = newname
            
        # While running lemon.seeing a sextractor catalogue is produced. 
        catalog = fits.getheader(newname)["SEX CATALOG"]
        catalog_newname = utilities.replace_extension(newname, ".cat")
        shutil.copy(catalog, catalog_newname)
        # Damn FITS format and its constraints!
        short_name = os.path.split(catalog_newname)[1]
        utilities.header_update_keyword(newname, "SEX CATALOG", short_name)

print "Estimate sky for images of CIG(s), standard(s) and cluster(s) "
for index, image in enumerate(list_images["filename"]):
    if list_images["type"][index] in ["cig","standards","clusters"]:
Example #10
def include_wcs(args):
    # Copy input names into output names
    output_names = args.input[:]
    for ii, im_name in enumerate(args.input):
        # If it is a domeflat, skyflat or bias image, calculating the astrometry makes little sense
        im = astroim.Astroim(im_name)
        if im.target.objtype in ["bias", "domeflat", "skyflat", "flat"]:
            continue

        # Prepare the output file for the resulting image
        if args.suffix:
            new_file = utilities.add_suffix_prefix(im_name, suffix=args.suffix)
        elif args.overwrite:
            new_file = im_name

        # Copy input image into a temporary file, so that we can modify it freely, for example, remove cosmics
        # or filter it (to be done).
        basename = get_basename(im_name)
        _, input_image = tempfile.mkstemp(prefix=basename, suffix=".fits")
        shutil.copy2(im_name, input_image)

        # Remove cosmics
        if args.cosmics:
            perform_cosmic_removal(input_image)

        # The output of the WCS process of astrometry.net will go to a .new file, the coordinates to a .cor file
        output_wcs = utilities.replace_extension(input_image, ".new")
        solved_file = utilities.replace_extension(input_image, ".solved")
        corrfile = utilities.replace_extension(input_image, ".cor")

        # Try first with the defaults of astrometry
        arguments_def = ["solve-field", "--no-plots", "--no-fits2fits", "--dir", "/tmp", "--overwrite",
                      "--new-fits", output_wcs, "--corr", corrfile, "--cpulimit", "1", input_image]
        try:  # Try to add the RA, DEC, Radius options to constrain the search
            ra, dec = im.primary_header.get(im.primary_header.RAk, im.primary_header.DECk)
            ra, dec = utilities.sex2deg(ra, dec)
            arguments = arguments_def + ["--ra", str(ra), "--dec", str(dec), "--radius", str(args.radius),
                                      "--cpulimit", "20"]
        except:
            arguments = arguments_def
        print "Trying to find WCS with astrometry standard keywords. "
        with utilities.tmp_mute():
            subprocess.call(arguments)

        # Now we will try using sextractor
        build_default_sex(args)
        # To avoid having too much residual crap in the folder, the output of astrometry will go to tmp (--dir /tmp).
        arguments0 = ["solve-field", "--no-plots", "--no-fits2fits", "--use-sextractor", "--dir", "/tmp",
                      "--x-column", "X_IMAGE", "--y-column", "Y_IMAGE", "--sort-column", "MAG_AUTO",
                      "--sort-ascending", "--sextractor-config", os.path.join(repipy_path, "default.sex"),
                      "--overwrite", "--new-fits", output_wcs, "--corr", corrfile, input_image]
        arguments0 += args.extras

        try:  # Try to add the RA, DEC, Radius options to constrain the search
            ra, dec = im.primary_header.get(im.primary_header.RAk, im.primary_header.DECk)
            ra, dec = utilities.sex2deg(ra, dec)
            arguments = arguments0 + ["--ra", str(ra), "--dec", str(dec), "--radius", str(args.radius),
                                      "--cpulimit", "20"]
        except:
            arguments = arguments0

        # Run astrometry; if it is not solved on the first attempt, try fitting freely (no RA, DEC constraints)
        if not os.path.exists(solved_file):
            subprocess.call(arguments)

        if not os.path.exists(solved_file):
            subprocess.call(arguments0)


        # Only if we have a solution
        if os.path.exists(solved_file):
            # copy the input file into the new file if they are not the same
            if os.path.abspath(im_name) != os.path.abspath(new_file):
                shutil.copy2(im_name, new_file)

            # get WCS from the resulting file, only the part added by astrometry.net
            with fits.open(output_wcs) as file_wcs:
                hdr_wcs = file_wcs[0].header
                ind = hdr_wcs.values().index('Original key: "END"')  # new part added by astrometry
                hdr_wcs = hdr_wcs[ind:]

            # Add that header to the original one of the image
            with fits.open(new_file, 'update') as new_im:
                new_im[0].header += hdr_wcs
                new_im.flush()

            # Build a catalogue of RA, DEC for the stars found
            with fits.open(corrfile) as table:
                cat_radec = utilities.replace_extension(new_file, "radec")
                with open(cat_radec, "w") as f:
                    for ra, dec in zip(table[1].data["field_ra"], table[1].data["field_dec"]):
                        f.write(str(ra) + " " + str(dec) + "\n")
            output_names[ii] = new_file

            # If --remove_original is True
            if args.remove_orig and not args.overwrite:
                os.unlink(im_name)

    return output_names
Example #11
def combine(args):
    # Create the folders that do not already exist for the output file
    outdir, outfile = os.path.split(os.path.abspath(args.output))
    if outdir == "":
        outdir = "."

    utils.if_dir_not_exists_create(outdir)

    # Build a list of the filter of each image
    #filter_list, images_filters = build_filter_list(args)
    images_filters = utils.collect_from_images(args.input, args.filterk)

    # If user wants all images to be combined together, regardless of filter:
    if args.all_together:
        images_filters = ["AllFilters"] * len(images_filters)

    # Create a default dictionary for the resulting images
    result = collections.defaultdict(str)    
    
    # For each of the filters present combine the images (and write)
    for filt in set(images_filters):
        # List of images with the current filter (all of them if --all_together was used)
        list1 = [args.input[p] for p,f in enumerate(images_filters) if f == filt ]

        # Check that all images have same dimension. If not, exit program
        if not utils.check_dimensions(list1):
            sys.exit("Dimensions of images to combine are different!")

        # Calculate scale of images
        scales = compute_scales(list1, args.scale, args.mask_key)


        # Get the sizes of the images
        lx, ly = utils.get_from_header(list1[0], "NAXIS2", "NAXIS1")

        # Now, in order to avoid loading many images in memory, we need to slice the images in pieces and combine a slice
        # of all the images at a time
        n_slices = 32          # divide the slow axis in this many pieces

        # Define the whole image and set all elements of mask to False
        whole_image = numpy.ma.zeros([lx,ly])
        whole_image.mask = numpy.zeros_like(whole_image.data)

        for xmin in range(0, lx, lx/n_slices):
            xmax = min(xmin + lx/n_slices, lx)

            # Now we can build and sort a section of the cube with all the images
            cube = cube_images(list1, args.mask_key, scales, limits=[xmin, 0, xmax, ly])
            cube.sort(axis=0)

            # Finally, average! Remember that the cube is sorted so that
            # cube[0,ii,jj] < cube[1,ii,jj] and that the highest values of all
            # are the masked elements. We will take advantage of it if the median
            # is selected, because nowadays the masked median is absurdly slow:
            # https://github.com/numpy/numpy/issues/1811
            map_cube = numpy.ma.count(cube, axis=0) # number non-masked values per pixel
            if args.average == "mean":
                image = numpy.ma.mean(cube, axis=0)
                non_masked_equivalent = numpy.mean(cube.data, axis=0)
            elif args.average == "median":
                image = home_made_median(map_cube, cube)
                non_masked_equivalent = numpy.median(cube.data, axis=0)

            # Image is a masked array; we need to fill in masked values with
            # args.fill_val if the user provided it. Also, pixels with fewer than
            # args.nmin valid values should be masked out. If the user did not provide
            # a fill_val argument, we substitute masked values with the
            # unmasked equivalent operation.
            image.mask[map_cube < args.nmin] = 1
            mask = image.mask
            if args.fill_val != '':
                image = image.filled(args.fill_val)
            else:
                image.data[mask == True] = non_masked_equivalent[mask == True]
                image = image.data

            whole_image.data[xmin:xmax, 0:ly] = image[:,:]
            whole_image.mask[xmin:xmax, 0:ly] = mask[:,:]

        # And save images. If all_together is activated, use the file name given by user. If not, we need
        # to separate by filter, so compose a new name with the one given by the user adding the filter
        if args.all_together:
            newfile = args.output
        else:
            newfile = os.path.join(outdir, utils.add_suffix_prefix(outfile, suffix="_" + filt) )

        if args.out_mask != "":
            name_mask = args.out_mask
        else:
            name_mask = newfile + ".msk"
        if os.path.isfile(newfile):
            os.remove(newfile)
        if os.path.isfile(name_mask):
            os.remove(name_mask)
        fits.writeto(newfile, whole_image.data)
        fits.writeto(name_mask, whole_image.mask.astype(numpy.int))
        result[filt] = newfile

        # Add comments to the headers
        string1 = " - Image built from the combination of the images: "+\
                 ", ".join(list1)
        string2 = " combine = " + args.average + ", scale = " + args.scale
        utils.add_history_line(newfile, string1 + string2 )
        utils.add_history_line(name_mask, " - Mask of image: " + newfile)
        if args.mask_key != "":
            utils.header_update_keyword(newfile, args.mask_key, name_mask,
                                        "Mask for this image")

        # To normalize, calculate the median and call arith_images to divide by it.
        if args.norm == True:
            median = compute_scales([newfile], args.scale, args.mask_key)[0]
            msg =  "- NORMALIZED USING MEDIAN VALUE:"
            arith_images.main(arguments=["--message", msg, "--output", newfile,
                                         "--mask_key", args.mask_key, "--fill_val",
                                         args.fill_val, newfile, "/", str(median)])
    return result
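home_made_median is not shown here, but the comments inside the loop describe the idea: the cube is sorted along the image axis so that masked values end up last, and map_cube holds the number of valid values per pixel, so the median of the valid values can be read straight from the sorted data. A rough sketch under those assumptions (the project's actual helper may differ; numpy.take_along_axis requires numpy >= 1.15):

import numpy

def home_made_median(map_cube, cube):
    # Median of the valid (non-masked) values of each pixel. Assumes `cube`
    # is a masked array already sorted along axis 0, so that the n valid
    # values of pixel (i, j) occupy cube[0:n, i, j].
    data = cube.data
    n = numpy.asarray(map_cube, dtype=int)
    lo = numpy.maximum((n - 1) // 2, 0)   # lower middle index of the valid block
    hi = numpy.maximum(n // 2, 0)         # upper middle index (equal to lo when n is odd)
    lo_vals = numpy.take_along_axis(data, lo[numpy.newaxis, ...], axis=0)[0]
    hi_vals = numpy.take_along_axis(data, hi[numpy.newaxis, ...], axis=0)[0]
    median = 0.5 * (lo_vals + hi_vals)
    # Pixels with no valid value at all stay masked.
    return numpy.ma.masked_array(median, mask=(n == 0))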
Example #12
    stars_images = [utils.replace_extension(ii, "radec") for ii in obj_images]
    output_images = match_psfs.main(["--input_stars"] + stars_images + ["--suffix", " -p",
                                                                        "--sigma_key", sky_stdk,
                                                                        "--gain_key", gaink,
                                                                        "--ron_key", read_noisek,
                                                                        "--expt_key", exptimek,
                                                                        "--airm_key", airmassk,
                                                                        "--FWHM_k", seeingk] + obj_images)
    list_images["filename"][np.where(list_images["objname"] == obj)] = output_images


print "Normalize using exposure time. "
for ii, im_name in enumerate(list_images["filename"]):
    if list_images["type"][ii] in ["cig", "clusters"]:
        tt = float(utils.get_from_header(im_name, exptimek))
        newname = utils.add_suffix_prefix(im_name, suffix="-t")
        mssg = "Normalized to exptime (divided by " + str(tt) + ")"
        arith_images.main(arguments=["--output", newname, "--message", mssg, "--mask_key", "MASK", im_name, "/", str(tt)])
        mssg = "Before normalizing: " + str(tt)
        # Update values for the exptime, the sky, the sky std...
        utils.header_update_keyword(newname, exptimek, 1, mssg)
        ss = float(utils.get_from_header(im_name, skyk))
        utils.header_update_keyword(newname, skyk, ss/tt)
        ss_std = float(utils.get_from_header(im_name, sky_stdk))
        utils.header_update_keyword(newname, sky_stdk, ss_std/tt)
        list_images["filename"][ii] = newname


print "Correct for atmospheric extinction."
for ii, im_name in enumerate(list_images["filename"]):
    if list_images["type"][ii] in ["cig", "clusters", "standards"]: