def imsub_all_fits(params, reference='ref.fits'):

    #
    # Create the output directory if it doesn't exist
    #
    if not os.path.exists(params.loc_output):
        os.mkdir(params.loc_output)

    #
    # The degree of spatial shape changes has to be at least as
    # high as the degree of spatial photometric scale
    #
    if params.sdeg < params.pdeg:
        print('Increasing params.sdeg to', params.pdeg)
        params.sdeg = params.pdeg

    #
    # Print out the parameters for this run
    #
    print('Parameters:')
    for par in dir(params):
        print(par, getattr(params, par))
    print()

    #
    # Determine our list of images
    #
    all_files = os.listdir(params.loc_data)
    all_files.sort()
    files = []
    for f in all_files:
        print('file', f)
        if fnmatch.fnmatch(f, params.name_pattern):
            g = DS.Observation(params.loc_data + os.path.sep + f, params)
            # Free the pixel arrays immediately; only the seeing (fw) and sky
            # estimates are needed at this stage.
            del g.data
            del g.mask
            print('fw', g.fw)
            if g.fw > 0.0:
                files.append(g)
                print(g.name, 'accepted')
    if len(files) < 3:
        print('Only', len(files), 'files found matching', params.name_pattern)
        print('Exiting')
        sys.exit(0)

    #
    # Have we specified a registration template?
    # If not, choose the sharpest image with acceptable seeing and background.
    #
    if params.registration_image:
        reg = DS.Observation(params.registration_image, params)
    else:
        reg = DS.EmptyBase()
        reg.fw = 999.0
        for f in files:
            if (f.fw < reg.fw) and (f.fw > params.reference_min_seeing) and \
                    (f.sky < params.registration_max_background):
                reg = f
    print('Registration image:', reg.name)

    #
    # Register images
    #
    print('Registering images')
    files_copy = [f for f in files]
    for f in files:
        print(f.name)
        if f == reg:
            f.image = f.data
            rf = params.loc_output + os.path.sep + 'r_' + f.name
            IO.write_image(f.image, rf)
        else:
            if not f.register(reg, params):
                files_copy.remove(f)
        # delete image arrays to save memory
        del f.image
        del f.mask
        del f.inv_variance
    del reg.data
    del reg.image
    del reg.mask
    del reg.inv_variance
    files = files_copy

    #
    # Write image names and dates to a file
    #
    if params.image_list_file:
        with open(params.loc_output + os.path.sep + params.image_list_file,
                  'w') as fid:
            for f in files:
                date = None
                if params.datekey:
                    date = IO.get_date(
                        params.loc_data + os.path.sep + f.name,
                        key=params.datekey) - 2450000
                if date:
                    fid.write(f.name + ' %10.5f\n' % date)
                else:
                    fid.write(f.name + '\n')

    #
    # Make the photometric reference image if we don't have it.
    # Find stamp positions if required.
    #
    if not os.path.exists(params.loc_output + os.path.sep + reference):
        print('Reg =', reg.name)
        stamp_positions = make_reference(
            files, reg, params, reference_image=reference)
        ref = DS.Observation(params.loc_output + os.path.sep + reference,
                             params)
        mask, _ = IO.read_fits_file(params.loc_output + os.path.sep +
                                    'mask_' + reference)
        variance, _ = IO.read_fits_file(params.loc_output + os.path.sep +
                                        'var_' + reference)
        ref.mask = mask
        ref.inv_variance = 1.0 / variance
        ref.register(reg, params)
    else:
        ref = DS.Observation(params.loc_output + os.path.sep + reference,
                             params)
        if os.path.exists(params.loc_output + os.path.sep + 'mask_' +
                          reference):
            mask, _ = IO.read_fits_file(params.loc_output + os.path.sep +
                                        'mask_' + reference)
        else:
            mask = np.ones_like(ref.data)
        ref.mask = mask
        ref.register(reg, params)
        stamp_positions = None
        if params.use_stamps:
            stamp_file = params.loc_output + os.path.sep + 'stamp_positions'
            if os.path.exists(stamp_file):
                stamp_positions = np.genfromtxt(stamp_file)
            else:
                stars = PF.choose_stamps(ref, params)
                stamp_positions = stars[:, 0:2]
                np.savetxt(stamp_file, stamp_positions)

    # Mask saturated pixels on the reference using a temporarily reduced
    # saturation threshold (0.9 * pixel_max).
    pm = params.pixel_max
    params.pixel_max *= 0.9
    ref.mask *= IM.compute_saturated_pixel_mask(ref.image, params)
    params.pixel_max = pm
    ref.blur = IM.boxcar_blur(ref.image)
    if params.mask_cluster:
        ref.mask *= IM.mask_cluster(ref.image, ref.mask, params)

    #
    # Detect stars and compute the PSF if we are doing photometry
    #
    star_positions = None
    sky = 0.0
    if params.do_photometry:
        star_file = params.loc_output + os.path.sep + 'star_positions'
        psf_file = params.loc_output + os.path.sep + 'psf.fits'
        if not os.path.exists(psf_file) or not os.path.exists(star_file):
            stars = PH.compute_psf_image(params, ref, psf_image=psf_file)
            star_positions = stars[:, 0:2]
            star_sky = stars[:, 4]
        if os.path.exists(star_file):
            star_positions = np.genfromtxt(star_file)
            star_sky = star_positions[:, 0] * 0.0
        else:
            np.savetxt(star_file, star_positions)
    print('sky =', sky)

    #
    # If we have pre-determined star positions
    #
    if params.star_file:
        stars = np.genfromtxt(params.star_file)
        star_positions = stars[:, 1:3]
        if params.star_reference_image:
            star_ref, h = IO.read_fits_file(params.star_reference_image)
            offset, _, _ = register_translation(star_ref, ref.image, 1000)
            dy, dx = offset
            #dy, dx = IM.positional_shift(ref.image, star_ref)
            print('position shift =', dx, dy)
            star_positions[:, 0] -= dx
            star_positions[:, 1] -= dy
        np.savetxt(star_file, star_positions)

    #
    # If we are doing photometry, group the stars by location on the detector
    #
    print('Group_check')
    print('params.do_photometry', params.do_photometry)
    print('params.use_GPU', params.use_GPU)
    if params.do_photometry:
        star_group_boundaries = None
        detector_mean_positions_x = None
        detector_mean_positions_y = None
        star_unsort_index = None
        star_sort_index, star_group_boundaries, detector_mean_positions_x, \
            detector_mean_positions_y = PH.group_stars_ccd(
                params, star_positions,
                params.loc_output + os.path.sep + reference)
        star_positions = star_positions[star_sort_index]
        star_sky = star_sky[star_sort_index]
        star_unsort_index = np.argsort(star_sort_index)

    #
    # Do photometry of the reference image
    #
    if params.do_photometry:
        ref_flux_file = params.loc_output + os.path.sep + 'ref.flux'
        if not os.path.exists(ref_flux_file):
            result = difference_image(
                ref,
                ref,
                params,
                stamp_positions=stamp_positions,
                psf_image=psf_file,
                star_positions=star_positions,
                star_group_boundaries=star_group_boundaries,
                detector_mean_positions_x=detector_mean_positions_x,
                detector_mean_positions_y=detector_mean_positions_y,
                star_sky=star_sky)
            if isinstance(result.flux, np.ndarray):
                print('ungrouping fluxes')
                result.flux = result.flux[star_unsort_index].copy()
                result.dflux = result.dflux[star_unsort_index].copy()
                np.savetxt(ref_flux_file,
                           np.vstack((result.flux, result.dflux)).T)

    #
    # Process images
    #
    if params.make_difference_images:
        if not params.use_GPU and (params.n_parallel > 1):
            pool = Pool(params.n_parallel)
            pool.map(
                process_image_helper,
                zip(files,
                    itertools.repeat(
                        (ref, params, stamp_positions, star_positions,
                         star_group_boundaries, star_unsort_index,
                         detector_mean_positions_x,
                         detector_mean_positions_y))))
        else:
            for f in files:
                process_image(
                    f, (ref, params, stamp_positions, star_positions,
                        star_group_boundaries, star_unsort_index,
                        detector_mean_positions_x, detector_mean_positions_y))

    return files
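
# ---------------------------------------------------------------------------
# Minimal usage sketch for imsub_all_fits (illustrative only, not part of the
# pipeline). It assumes a parameter container exposing the attributes read
# above (loc_data, loc_output, name_pattern, do_photometry,
# make_difference_images, use_GPU, ...); the DS.Parameters() constructor
# shown here is an assumption and may be named differently in the actual
# data-structures module.
#
#     params = DS.Parameters()                 # hypothetical constructor
#     params.loc_data = 'raw'                  # directory of input FITS images
#     params.loc_output = 'output'             # created if it does not exist
#     params.name_pattern = '*.fits'           # images to process
#     params.do_photometry = True
#     params.make_difference_images = True
#     files = imsub_all_fits(params, reference='ref.fits')
# ---------------------------------------------------------------------------
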
# NOTE: a second, later definition of do_photometry appears below; if both are
# kept in the same module, that later definition shadows this one.
def do_photometry(params,
                  extname='newflux',
                  star_file='star_positions',
                  psf_file='psf.fits',
                  star_positions=None,
                  reference_image='ref.fits'):

    #
    # Determine our list of files
    #
    all_files = os.listdir(params.loc_data)
    all_files.sort()
    files = []
    for f in all_files:
        if fnmatch.fnmatch(f, params.name_pattern):
            g = DS.Observation(params.loc_data + os.path.sep + f, params)
            if g.fw > 0.0:
                files.append(g)

    ref = DS.Observation(params.loc_output + os.path.sep + reference_image,
                         params)
    ref.register(ref, params)

    #
    # Detect stars and compute the PSF if necessary
    #
    psf_file = params.loc_output + os.path.sep + psf_file
    star_file = params.loc_output + os.path.sep + star_file
    print(psf_file)
    print(os.path.exists(psf_file))
    print(star_file)
    print(os.path.exists(star_file))
    if not os.path.exists(psf_file) or not os.path.exists(star_file):
        stars = PH.compute_psf_image(params, ref, psf_image=psf_file)
    if star_positions is None:
        if os.path.exists(star_file):
            star_positions = np.genfromtxt(star_file)[:, :2]
        else:
            star_positions = stars[:, 0:2]

    #
    # Group the stars by location
    #
    star_group_boundaries = None
    detector_mean_positions_x = None
    detector_mean_positions_y = None
    star_sort_index, star_group_boundaries, detector_mean_positions_x, \
        detector_mean_positions_y = PH.group_stars_ccd(
            params, star_positions,
            params.loc_output + os.path.sep + reference_image)
    star_positions = star_positions[star_sort_index]
    star_unsort_index = np.argsort(star_sort_index)

    #
    # Process the reference image
    #
    print('Processing', reference_image)
    ref = DS.Observation(params.loc_output + os.path.sep + reference_image,
                         params)
    #reg = Observation(params.loc_data+os.path.sep+
    #                  params.registration_image, params)
    mask, _ = IO.read_fits_file(params.loc_output + os.path.sep + 'mask_' +
                                reference_image)
    variance, _ = IO.read_fits_file(params.loc_output + os.path.sep + 'var_' +
                                    reference_image)
    ref.mask = mask
    ref.inv_variance = 1.0 / variance + (1 - mask)
    ref.register(ref, params)
    smask = IM.compute_saturated_pixel_mask(ref.image, params)
    ref.inv_variance += (1 - (smask * mask)) * 1.e-12
    ktable = params.loc_output + os.path.sep + 'k_' + os.path.basename(
        reference_image)
    kernelIndex, extendedBasis, c, params = IO.read_kernel_table(ktable,
                                                                 params)
    kernelRadius = np.max(kernelIndex[:, 0]) + 1
    if np.sum(extendedBasis) > 0:
        kernelRadius += 1
    print('kernelIndex', kernelIndex)
    print('extendedBasis', extendedBasis)
    print('coeffs', c)
    print('kernelRadius', kernelRadius)
    print('star_positions', star_positions.shape)
    phot_target, _ = IO.read_fits_file(params.loc_output + os.path.sep +
                                       'clean_' + reference_image)
    ref.flux, ref.dflux = CIF.photom_all_stars_simultaneous(
        phot_target, ref.inv_variance, star_positions, psf_file, c,
        kernelIndex, extendedBasis, kernelRadius, params,
        star_group_boundaries, detector_mean_positions_x,
        detector_mean_positions_y)
    if isinstance(ref.flux, np.ndarray):
        if not params.use_GPU:
            print('ungrouping fluxes')
            ref.flux = ref.flux[star_unsort_index].copy()
            ref.dflux = ref.dflux[star_unsort_index].copy()
        print(ref.flux.shape, star_positions.shape)
        np.savetxt(
            params.loc_output + os.path.sep + reference_image + '.' + extname,
            np.vstack((ref.flux, ref.dflux)))

    #
    # Process difference images
    #
    for f in files:
        if not os.path.exists(params.loc_output + os.path.sep + f.name + '.' +
                              extname):
            print('Processing', f.name)
            target = f.name
            dtarget = params.loc_output + os.path.sep + 'd_' + \
                os.path.basename(target)
            ntarget = params.loc_output + os.path.sep + 'n_' + \
                os.path.basename(target)
            ztarget = params.loc_output + os.path.sep + 'z_' + \
                os.path.basename(target)
            ktable = params.loc_output + os.path.sep + 'k_' + \
                os.path.basename(target)
            if os.path.exists(dtarget) and os.path.exists(ntarget) and \
                    os.path.exists(ktable):
                norm, h = IO.read_fits_file(ntarget)
                diff, h = IO.read_fits_file(dtarget)
                mask, h = IO.read_fits_file(ztarget)
                inv_var = (norm / diff)**2 + (1 - mask)
                kernelIndex, extendedBasis, c, params = IO.read_kernel_table(
                    ktable, params)
                kernelRadius = np.max(kernelIndex[:, 0]) + 1
                if np.sum(extendedBasis) > 0:
                    kernelRadius += 1
                print('kernelIndex', kernelIndex)
                print('extendedBasis', extendedBasis)
                print('coeffs', c)
                print('kernelRadius', kernelRadius)
                IO.write_image(diff,
                               params.loc_output + os.path.sep + 'diff1.fits')
                diff = IM.undo_photometric_scale(diff, c, params.pdeg)
                IO.write_image(diff,
                               params.loc_output + os.path.sep + 'diff2.fits')
                IO.write_image(
                    inv_var, params.loc_output + os.path.sep + 'inv_var.fits')
                IO.write_kernel_table(
                    params.loc_output + os.path.sep + 'ktable.fits',
                    kernelIndex, extendedBasis, c, params)
                flux, dflux = CI.photom_all_stars(
                    diff, inv_var, star_positions, psf_file, c, kernelIndex,
                    extendedBasis, kernelRadius, params,
                    star_group_boundaries, detector_mean_positions_x,
                    detector_mean_positions_y)
                print('flux[100:110]:')
                print(flux[100:110])
                if isinstance(flux, np.ndarray):
                    if not params.use_GPU:
                        print('ungrouping fluxes')
                        flux = flux[star_unsort_index].copy()
                        dflux = dflux[star_unsort_index].copy()
                        print('unsort flux[100:110]:')
                        print(flux[100:110])
                    np.savetxt(
                        params.loc_output + os.path.sep + f.name + '.' +
                        extname,
                        np.vstack((flux, dflux)).T)
                # Stop after the first difference image has been processed.
                sys.exit(0)
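
# Illustrative helper (an addition, not part of the original pipeline API):
# the routine above writes, for every processed image <name>, a two-column
# text file <loc_output>/<name>.<extname> holding one (flux, dflux) pair per
# star, while the reference-image fluxes are written as two rows (no
# transpose). This sketch simply reads those files back with NumPy.
def _load_flux_file(path, two_columns=True):
    """Return (flux, dflux) arrays from a file written by do_photometry.

    Set two_columns=False for the reference-image file, which is saved as
    two rows rather than two columns.
    """
    import numpy as np
    data = np.loadtxt(path)
    if two_columns:
        flux, dflux = data.T
    else:
        flux, dflux = data
    return flux, dflux
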
def do_photometry(params,
                  extname='newflux',
                  star_file='star_positions',
                  psf_file='psf.fits',
                  star_positions=None,
                  reference_image='ref.fits'):

    #
    # Determine our list of files
    #
    all_files = os.listdir(params.loc_data)
    all_files.sort()
    files = []
    for f in all_files:
        if fnmatch.fnmatch(f, params.name_pattern):
            g = DS.Observation(params.loc_data + os.path.sep + f, params)
            if g.fw > 0.0:
                files.append(g)

    ref = DS.Observation(params.loc_output + os.path.sep + reference_image,
                         params)
    ref.register(ref, params)

    #
    # Detect stars and compute the PSF if necessary
    #
    if params.do_photometry:
        psf_file = params.loc_output + os.path.sep + psf_file
        if os.path.exists(params.star_file):
            star_pos = np.genfromtxt(params.star_file)[:, 1:3]
            if not os.path.exists(psf_file):
                stars = PH.compute_psf_image(params, ref, psf_image=psf_file)
        else:
            if not os.path.exists(star_file):
                stars = PH.compute_psf_image(params, ref, psf_image=psf_file)
                star_pos = stars[:, 0:2]
                np.savetxt(star_file, star_pos)
            else:
                star_pos = np.genfromtxt(star_file)
                if not os.path.exists(psf_file):
                    stars = PH.compute_psf_image(params, ref,
                                                 psf_image=psf_file)

    #
    # Have we been passed an array of star positions?
    #
    if star_positions is None:
        star_positions = star_pos

    #
    # If we are using a CPU, group the stars by location
    #
    star_group_boundaries = None
    detector_mean_positions_x = None
    detector_mean_positions_y = None
    if not params.use_GPU:
        star_sort_index, star_group_boundaries, detector_mean_positions_x, \
            detector_mean_positions_y = PH.group_stars_ccd(
                params, star_positions,
                params.loc_output + os.path.sep + reference_image)
        star_positions = star_positions[star_sort_index]
        star_unsort_index = np.argsort(star_sort_index)

    #
    # Process the reference image
    #
    print('Processing', reference_image)
    ref = DS.Observation(params.loc_output + os.path.sep + reference_image,
                         params)
    # reg = Observation(params.loc_data+os.path.sep+
    #                   params.registration_image, params)
    ref.register(ref, params)
    smask = IM.compute_saturated_pixel_mask(ref.image, 6, params)
    ref.inv_variance += 1 - smask
    ktable = params.loc_output + os.path.sep + 'k_' + os.path.basename(
        reference_image)
    kernelIndex, extendedBasis, c, params = IO.read_kernel_table(ktable,
                                                                 params)
    kernelRadius = np.max(kernelIndex[:, 0]) + 1
    if np.sum(extendedBasis) > 0:
        kernelRadius += 1
    print('kernelIndex', kernelIndex)
    print('extendedBasis', extendedBasis)
    print('coeffs', c)
    print('kernelRadius', kernelRadius)
    phot_target = ref.image
    ref.flux, ref.dflux = CI.photom_all_stars(
        phot_target, ref.inv_variance, star_positions, psf_file, c,
        kernelIndex, extendedBasis, kernelRadius, params,
        star_group_boundaries, detector_mean_positions_x,
        detector_mean_positions_y)
    if isinstance(ref.flux, np.ndarray):
        if not params.use_GPU:
            print('ungrouping fluxes')
            ref.flux = ref.flux[star_unsort_index].copy()
            ref.dflux = ref.dflux[star_unsort_index].copy()
        np.savetxt(
            params.loc_output + os.path.sep + reference_image + '.' + extname,
            np.vstack((ref.flux, ref.dflux)).T)

    #
    # Process difference images
    #
    for f in files:
        if not os.path.exists(params.loc_output + os.path.sep + f.name + '.' +
                              extname):
            print('Processing', f.name)
            target = f.name
            dtarget = params.loc_output + os.path.sep + 'd_' + \
                os.path.basename(target)
            ntarget = params.loc_output + os.path.sep + 'n_' + \
                os.path.basename(target)
            ztarget = params.loc_output + os.path.sep + 'z_' + \
                os.path.basename(target)
            ktable = params.loc_output + os.path.sep + 'k_' + \
                os.path.basename(target)
            if os.path.exists(dtarget) and os.path.exists(ntarget) and \
                    os.path.exists(ktable):
                norm, h = IO.read_fits_file(ntarget)
                diff, h = IO.read_fits_file(dtarget)
                mask, h = IO.read_fits_file(ztarget)
                inv_var = (norm / diff)**2 + (1 - mask)
                kernelIndex, extendedBasis, c, params = IO.read_kernel_table(
                    ktable, params)
                kernelRadius = np.max(kernelIndex[:, 0]) + 1
                if np.sum(extendedBasis) > 0:
                    kernelRadius += 1
                print('kernelIndex', kernelIndex)
                print('extendedBasis', extendedBasis)
                print('coeffs', c)
                print('kernelRadius', kernelRadius)
                diff = IM.undo_photometric_scale(diff, c, params.pdeg)
                flux, dflux = PH.photom_all_stars(
                    diff, inv_var, star_positions, psf_file, c, kernelIndex,
                    extendedBasis, kernelRadius, params,
                    star_group_boundaries, detector_mean_positions_x,
                    detector_mean_positions_y)
                if isinstance(flux, np.ndarray):
                    if not params.use_GPU:
                        print('ungrouping fluxes')
                        flux = flux[star_unsort_index].copy()
                        dflux = dflux[star_unsort_index].copy()
                    np.savetxt(
                        params.loc_output + os.path.sep + f.name + '.' +
                        extname,
                        np.vstack((flux, dflux)).T)
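
# ---------------------------------------------------------------------------
# Illustrative call sequence (a sketch, not a prescribed interface): run the
# image-subtraction driver first so that the reference image, kernel tables
# (k_*), difference images (d_*), noise images (n_*) and masks (z_*) exist in
# params.loc_output, then extract fluxes for every image. Construction of the
# params object is assumed, as in the sketch after imsub_all_fits above.
#
#     files = imsub_all_fits(params, reference='ref.fits')
#     do_photometry(params, extname='newflux', reference_image='ref.fits')
#
# Each processed image <name> then has a two-column file
# params.loc_output/<name>.newflux containing one (flux, dflux) row per star,
# in the order of the star list (after un-grouping on the CPU path).
# ---------------------------------------------------------------------------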