whr = np.where(list_images["type"] == "blanks")
create_masks.main(arguments=["--max_val", "30000", "--min_val", "1000", "--stars", "--circular"] +
                  list(list_images["filename"][whr]))

# Combine bias images
print "Combining bias images"
whr = np.where(list_images["type"] == "bias")
bias_images = list(list_images["filename"][whr])
superbias = combine_images.main(arguments=["--average", "median", "--all_together",
                                           "--output", "superbias.fits",
                                           "--mask_key", "mask",
                                           "--filterk", filterk] + bias_images[:])

# Subtract bias from all images.
print "Subtracting bias"
newname = arith.main(arguments=["--suffix", " -b", "--message", "BIAS SUBTRACTED",
                                "--mask_key", "mask"] +
                     list(list_images["filename"]) + ["-", superbias["AllFilters"]])
list_images["filename"][:] = newname

# Combine skyflats using blocks to distinguish between sunset and sunrise flats.
print "Combining sky flats"
skyflat_indices = np.where(list_images["type"] == "skyflats")
times = list_images["time"][skyflat_indices]  # times of the skyflat images
block_limits = utilities.group_images_in_blocks(times, limit=20)
master_skyflats = {}
for ii in range(len(block_limits) - 1):
    block = list_images["filename"][skyflat_indices][block_limits[ii]:block_limits[ii+1]]
    time_block = utilities.mean_datetime(list_images["time"][skyflat_indices]
                                         [block_limits[ii]:block_limits[ii+1]])
    skyflat = combine_images.main(arguments=["--average", "median", "--norm", "--scale", "median",
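
# Illustrative only: a minimal sketch of the block-grouping idea used above. This is NOT the
# actual utilities.group_images_in_blocks; it assumes `times` is a time-sorted sequence of
# datetime objects and that `limit` is a maximum gap in minutes, and it returns boundary
# indices so that times[limits[ii]:limits[ii+1]] is one block of images taken close together.
def _group_in_blocks_sketch(times, limit=20):
    limits = [0]
    for jj in range(1, len(times)):
        gap_minutes = (times[jj] - times[jj - 1]).total_seconds() / 60.
        if gap_minutes > limit:  # a long pause separates, e.g., sunset from sunrise flats
            limits.append(jj)
    limits.append(len(times))
    return limits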
print "Bias images", bias_images output_bias = os.path.join(directory, "superbias.fits") superbias = combine_images.main(arguments=["--average", "median", "--all_together", "--output", output_bias, "--mask_key", "mask", "--filterk", filterk] +\ bias_images[:]) print "Subtract bias" for ii, im in enumerate(list_images["filename"]): if list_images["type"][ii] not in ["bias", "unknown"]: args = ["--suffix", " -b", "--message", "BIAS SUBTRACTED", "--mask_key", "mask", im, "-", superbias["AllFilters"]] if type_of_bias_subtraction: args = [type_of_bias_subtraction] + args newname = arith.main(arguments=args) list_images["filename"][ii] = newname print "Combine flats" output_flats = os.path.join(directory, "masterskyflat.fits") flat_indices = np.where(list_images["type"] == "skyflats") flats = combine_images.main(arguments=["--average", "median", "--norm", "--scale", "median", "--output", output_flats, "--mask_key", "mask", "--filterk", filterk] + list(list_images["filename"][flat_indices])) print "Correct flat-field" for ii,im in enumerate(list_images["filename"]): if list_images["type"][ii] not in ["bias", "unknown"]: # Guess the filter of the image from the name, find correct flat
def combine(args):
    # Create the folders that do not already exist for the output file
    outdir, outfile = os.path.split(os.path.abspath(args.output))
    if outdir == "":
        outdir = "."
    utils.if_dir_not_exists_create(outdir)

    # Build a list with the filter of each image
    # filter_list, images_filters = build_filter_list(args)
    images_filters = utils.collect_from_images(args.input, args.filterk)

    # If the user wants all images combined together, regardless of filter:
    if args.all_together:
        images_filters = ["AllFilters"] * len(images_filters)

    # Create a default dictionary for the resulting images
    result = collections.defaultdict(str)

    # For each of the filters present, combine the images (and write the result)
    for filt in set(images_filters):
        # List of images with the current filter (or all of them if all_together is set)
        list1 = [args.input[p] for p, f in enumerate(images_filters) if f == filt]

        # Check that all images have the same dimensions. If not, exit the program.
        if not utils.check_dimensions(list1):
            sys.exit("Dimensions of images to combine are different!")

        # Calculate the scale of the images
        scales = compute_scales(list1, args.scale, args.mask_key)

        # Get the sizes of the images
        lx, ly = utils.get_from_header(list1[0], "NAXIS2", "NAXIS1")

        # In order to avoid loading many images in memory, slice the images in pieces
        # and combine one slice of all the images at a time.
        n_slices = 32  # divide the slow axis in this many pieces

        # Define the whole image and set all elements of the mask to False
        whole_image = numpy.ma.zeros([lx, ly])
        whole_image.mask = numpy.zeros_like(whole_image.data)

        for xmin in range(0, lx, lx / n_slices):
            xmax = min(xmin + lx / n_slices, lx)
            # Build and sort a section of the cube containing all the images
            cube = cube_images(list1, args.mask_key, scales, limits=[xmin, 0, xmax, ly])
            cube.sort(axis=0)

            # Finally, average! The cube is sorted so that cube[0,ii,jj] < cube[1,ii,jj]
            # and the highest values of all are the masked elements. We take advantage of
            # that when the median is selected, because the masked median is absurdly slow:
            # https://github.com/numpy/numpy/issues/1811
            map_cube = numpy.ma.count(cube, axis=0)  # number of non-masked values per pixel
            if args.average == "mean":
                image = numpy.ma.mean(cube, axis=0)
                non_masked_equivalent = numpy.mean(cube.data, axis=0)
            elif args.average == "median":
                image = home_made_median(map_cube, cube)
                non_masked_equivalent = numpy.median(cube.data, axis=0)

            # Image is a masked array; fill masked values with args.fill_val if the user
            # provided it. Pixels with fewer than args.nmin valid values are masked out.
            # If the user did not provide fill_val, substitute masked values with the
            # unmasked equivalent operation.
            image.mask[map_cube < args.nmin] = 1
            mask = image.mask
            if args.fill_val != '':
                image = image.filled(args.fill_val)
            else:
                image.data[mask == True] = non_masked_equivalent[mask == True]
                image = image.data

            whole_image.data[xmin:xmax, 0:ly] = image[:, :]
            whole_image.mask[xmin:xmax, 0:ly] = mask[:, :]

        # And save the images. If all_together is activated, use the file name given by the user.
        # If not, we need to separate by filter, so compose a new name from the one
        # given by the user, adding the filter.
        if args.all_together:
            newfile = args.output
        else:
            newfile = os.path.join(outdir, utils.add_suffix_prefix(outfile, suffix="_" + filt))

        if args.out_mask != "":
            name_mask = args.out_mask
        else:
            name_mask = newfile + ".msk"

        if os.path.isfile(newfile):
            os.remove(newfile)
        if os.path.isfile(name_mask):
            os.remove(name_mask)
        fits.writeto(newfile, whole_image.data)
        fits.writeto(name_mask, whole_image.mask.astype(numpy.int))
        result[filt] = newfile

        # Add comments to the headers
        string1 = " - Image built from the combination of the images: " + ", ".join(list1)
        string2 = " combine = " + args.average + ", scale = " + args.scale
        utils.add_history_line(newfile, string1 + string2)
        utils.add_history_line(name_mask, " - Mask of image: " + newfile)
        if args.mask_key != "":
            utils.header_update_keyword(newfile, args.mask_key, name_mask, "Mask for this image")

        # To normalize, calculate the median and call arith_images to divide by it.
        if args.norm == True:
            median = compute_scales([newfile], args.scale, args.mask_key)[0]
            msg = "- NORMALIZED USING MEDIAN VALUE:"
            arith_images.main(arguments=["--message", msg, "--output", newfile,
                                         "--mask_key", args.mask_key,
                                         "--fill_val", args.fill_val,
                                         newfile, "/", str(median)])

    return result
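
# Illustrative only: a minimal sketch of the "sorted cube" median trick referenced in
# combine() above, not necessarily the repository's home_made_median. Because the cube is
# sorted along axis 0 with masked pixels pushed to the end, the median of the valid values
# at each pixel can be read off by index instead of calling the slow masked median.
import numpy

def _sorted_cube_median_sketch(map_cube, cube):
    n = map_cube.astype(int)              # number of valid (non-masked) values per pixel
    data = cube.data                       # sorted data; masked values sit at the end
    ii, jj = numpy.indices(n.shape)
    lo = numpy.maximum((n - 1) // 2, 0)    # lower middle index of the valid values
    hi = numpy.maximum(n // 2, 0)          # upper middle index (== lo when n is odd)
    median = 0.5 * (data[lo, ii, jj] + data[hi, ii, jj])
    return numpy.ma.array(median, mask=(n == 0))  # pixels with no valid data stay masked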
"--sigma_key", sky_stdk, "--gain_key", gaink, "--ron_key", read_noisek, "--expt_key", exptimek, "--airm_key", airmassk, "--FWHM_k", seeingk] + obj_images) list_images["filename"][np.where(list_images["objname"] == obj)] = output_images print "Normalize using exposure time. " for ii, im_name in enumerate(list_images["filename"]): if list_images["type"][ii] in ["cig", "clusters"]: tt = float(utils.get_from_header(im_name, exptimek)) newname = utils.add_suffix_prefix(im_name, suffix="-t") mssg = "Normalized to exptime (divided by " + str(tt) + ")" arith_images.main(arguments=["--output", newname, "--message", mssg, "--mask_key", "MASK", im_name, "/", str(tt)]) mssg = "Before normalizing: " + str(tt) # Update values for the exptime, the sky, the sky std... utils.header_update_keyword(newname, exptimek, 1, mssg) ss = float(utils.get_from_header(im_name, skyk)) utils.header_update_keyword(newname, skyk, ss/tt) ss_std = float(utils.get_from_header(im_name, sky_stdk)) utils.header_update_keyword(newname, sky_stdk, ss_std/tt) list_images["filename"][ii] = newname print "Correct for atmospheric extinction." for ii, im_name in enumerate(list_images["filename"]): if list_images["type"][ii] in ["cig", "clusters", "standards"]: airmass = float(utils.get_from_header(im_name, airmassk)) correcting_factor = 10**(ext_coeff * airmass / 2.5)