def crop_masters(path=cmpath):
    mastercollection = ImageFileCollection('Master_Files')
    for image, imname in mastercollection.ccds(imtype='trimmed bias', return_fname=True):
        trimage = ccdp.trim_image(image, fits_section=str(sciwin))
        trimage.meta['trimwind'] = (str(sciwin), 'readout window')
        trimage.meta['imtype'] = ('mbias', 'windowed master bias')
        trimage.write(path + imname, overwrite=True)
    for image, imname in mastercollection.ccds(imtype='subflat', return_fname=True):
        trimage = ccdp.trim_image(image, fits_section=str(sciwin))
        trimage.meta['trimwind'] = (str(sciwin), 'readout window')
        trimage.meta['imtype'] = ('mflat', 'windowed master flat')
        trimage.write(path + imname, overwrite=True)
def sub_bias(refresh='2', bias='2'):
    tflatcollection = ImageFileCollection('Trimmed_Flat')
    if bias == '1':
        biaspath = 'Master_Files/mbias_median.fits'
        dest = 'Trimmed_Flat/subflatsmed/'
    elif bias == '2':
        biaspath = 'Master_Files/mbias.fits'
        dest = 'Trimmed_Flat/subflatssig/'
    if refresh == '1':
        subflatpathlist = []
        mbias = CCDData.read(biaspath, unit='adu')
        for ccdf, flatn in tflatcollection.ccds(imtype='trimmed flat', return_fname=True):
            subflat = ccdp.subtract_bias(ccdf, mbias, add_keyword='subbias')
            subflat.meta['imtype'] = ('subflat', 'bias subtracted flat')
            subflat.write(dest + flatn[0:8] + '_subbias.fits', overwrite=True)
            subflatpathlist.append(dest + flatn[0:8] + '_subbias.fits')
    else:
        try:
            subflatcollection = ImageFileCollection(dest)
            subflatpathlist = subflatcollection.files_filtered(imtype='subflat', include_path=True)
            print('found', len(subflatpathlist), 'subflats')
        except:
            print('can\'t locate subflats, create or check directory')
            sys.exit()
    return tflatcollection, subflatpathlist
def trim_flat(refresh='2'):
    flatcollection = ImageFileCollection('HD115709/flat_SII', ext=4)
    flag = 0
    tflatpathlist = []
    if refresh == '1':
        for ccdf, flatn in flatcollection.ccds(return_fname=True, ccd_kwargs={'unit': 'adu'}):
            if flag == 0:
                print('all flats will be trimmed to :', ccdf.meta['trimsec'])
                flag = 1
            print('trimming', flatn)
            tflat = ccdp.trim_image(ccdf, fits_section=str(ccdf.meta['trimsec']))
            tflat.meta['imtype'] = ('trimmed flat', 'type of image')
            tflat.meta['taxis1'] = (2048, 'dimension1')
            tflat.meta['taxis2'] = (4096, 'dimension2')
            tflat.write('Trimmed_Flat/' + flatn[0:8] + '_trim.fits', overwrite=True)
            tflatpathlist.append('Trimmed_Flat/' + flatn[0:8] + '_trim.fits')
        print('created', len(tflatpathlist), 'trimmed flats')
    elif refresh == '2':
        try:
            tflatcollection = ImageFileCollection('Trimmed_Flat')
            tflatpathlist = tflatcollection.files_filtered(imtype='trimmed flat', include_path=True)
            print('found', len(tflatpathlist), 'trimmed flats')
        except:
            print('can\'t locate trimmed flats, create or check directory')
            sys.exit(0)
    return flatcollection, tflatpathlist
def trim_bias(refresh='2'):
    biascollection = ImageFileCollection('HD115709/bias', ext=4)
    flag = 0
    if refresh == '1':
        tbiaspathlist = []
        for ccdb, biasn in biascollection.ccds(return_fname=True, ccd_kwargs={'unit': 'adu'}):
            if flag == 0:
                print('all biases will be trimmed to :', ccdb.meta['trimsec'])
                flag = 1
            print('trimming', biasn)
            tbias = ccdp.trim_image(ccdb, fits_section=str(ccdb.meta['trimsec']))
            tbias.meta['imtype'] = ('trimmed bias', 'type of image')
            tbias.meta['taxis1'] = (2048, 'dimension1')
            tbias.meta['taxis2'] = (4096, 'dimension2')
            tbias.write('Trimmed_Bias/' + biasn[0:8] + '_trim.fits', overwrite=True)
            tbiaspathlist.append('Trimmed_Bias/' + biasn[0:8] + '_trim.fits')
        print('created', len(tbiaspathlist), 'trimmed biases')
    else:
        try:
            tbiascollection = ImageFileCollection('Trimmed_Bias')
            tbiaspathlist = tbiascollection.files_filtered(imtype='trimmed bias', include_path=True)
            print('found', len(tbiaspathlist), 'trimmed bias')
        except:
            print('can\'t locate trimmed biases, create or check directory')
            sys.exit()
    return biascollection, tbiaspathlist
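# A minimal usage sketch for the trim/bias-subtraction helpers above, under these
# assumptions: the 'HD115709/...' raw directories and the 'Trimmed_Bias/',
# 'Trimmed_Flat/' and 'Trimmed_Flat/subflatssig/' output directories already exist,
# 'Master_Files/mbias.fits' was produced by an earlier combination step, and ccdp
# (ccdproc), ImageFileCollection, CCDData and sys are imported as in the other
# snippets. refresh='1' regenerates files, refresh='2' reuses what is on disk.
if __name__ == '__main__':
    biascollection, tbiaspathlist = trim_bias(refresh='1')   # trim raw biases
    flatcollection, tflatpathlist = trim_flat(refresh='1')   # trim raw flats
    # subtract the master bias from the trimmed flats
    tflatcollection, subflatpathlist = sub_bias(refresh='1', bias='2')
    print(len(tbiaspathlist), 'trimmed biases,', len(subflatpathlist), 'bias-subtracted flats')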
def t120_mkoffset(offset_dir=t120.t120_ofst_dir,
                  master_file_name=t120.t120_master_name):
    master_file = offset_dir + master_file_name
    listimg = ImageFileCollection(offset_dir)  # ,glob_include='*.fit',glob_exclude='*.fits')
    listccd = []
    for ccd, file_name in listimg.ccds(ccd_kwargs={'unit': 'adu'}, return_fname=True):
        t120.log.info('now considering file ' + file_name)
        listccd.append(ccd)
    combiner = Combiner(listccd)
    t120.log.info('now making the combination')
    master_offset = combiner.median_combine()
    fits_ccddata_writer(master_offset, master_file)
    t120.log.info('Result saved in ' + master_file)
    return master_file
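# Minimal usage sketch for t120_mkoffset, assuming the t120 configuration module
# provides the offset directory, master file name and logger, and that Combiner and
# fits_ccddata_writer are imported from ccdproc and astropy.nddata as in the
# surrounding snippets.
master_offset_path = t120_mkoffset()  # median-combine every frame found in t120.t120_ofst_dir
print('master offset written to', master_offset_path)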
def trim_bias(refresh=False):
    biascollection = ImageFileCollection('HD115709/bias', ext=4)
    flag = 0
    tbiaspathlist = []
    if refresh:
        for ccdb, biasn in biascollection.ccds(return_fname=True, ccd_kwargs={'unit': 'adu'}):
            print('trimming', biasn)
            ccdb.header['imtype'] = ('bias', 'type of image')
            if flag == 0:
                print('all biases will be trimmed to :', ccdb.meta['trimsec'])
                flag = 1
            tbias = ccdp.trim_image(ccdb, fits_section=str(ccdb.meta['trimsec']))
            tbias.meta['imtype'] = ('trimmed bias', 'type of image')
            tbias.meta['taxis1'] = (2048, 'dimension1')
            tbias.meta['taxis2'] = (4096, 'dimension2')
            tbias.write('Trimmed_Bias/' + biasn[0:8] + '_trim.fits', overwrite=True)
            tbiaspathlist.append('Trimmed_Bias/' + biasn[0:8] + '_trim.fits')
    return biascollection, tbiaspathlist
listimg = ImageFileCollection(t120.t120_flat_dir)  # ,glob_include='*.fit',glob_exclude='*.fits')
list_filters = listimg.values('filter', unique=True)
#for filter_name in listimg.values('filter',unique=True):
for filter_name in list_filters:
    t120.log.info('*** filter: ' + filter_name)
    my_files = listimg.files_filtered(filter=filter_name)
    t120.log.info('my_files=' + str(my_files))
for filter_name in listimg.values('filter', unique=True):
    t120.log.info('*** filter: ' + filter_name)
    listccd = []
    for ccd, file_name in listimg.ccds(ccd_kwargs={'unit': 'adu'},
                                       filter=filter_name, return_fname=True):
        t120.log.info('now considering file ' + file_name)
        listccd.append(ccd)
    """
    t120.log.info('now making the Combiner object')
    combiner = Combiner(listccd)
    t120.log.info('now making the combination')
    master_flat = combiner.average_combine()
    master_file = t120.t120_flat_dir+'/master-'+filter_name+'.fits'
    fits_ccddata_writer(master_flat,master_file)
    t120.log.info('Master flat saved in '+master_file)
    """
import os
import itertools
from astropy.stats import sigma_clip, mad_std
import time
import sys
import ccdproc as ccdp
from ccdproc import ImageFileCollection

if not os.path.exists('Master_Files/Windowed'):
    os.makedirs('Master_Files/Windowed')
    print('folder \'Master_Files/Windowed\' created')
if not os.path.exists('Final_Science'):
    os.makedirs('Final_Science')
    print('folder \'Final_Science\' created')

#read science images and find RTDATSEC
sciencecollection = ImageFileCollection('HD115709/SII', ext=1)
for sci in sciencecollection.ccds(ccd_kwargs={'unit': 'adu'}):
    sciwin = sci.meta['RTDATSEC']
    break
cmpath = 'Master_Files/Windowed/'
cspath = 'Final_Science/'

#crop master biases and master flats
def crop_masters(path=cmpath):
    mastercollection = ImageFileCollection('Master_Files')
    for image, imname in mastercollection.ccds(imtype='trimmed bias', return_fname=True):
        trimage = ccdp.trim_image(image, fits_section=str(sciwin))
        trimage.meta['trimwind'] = (str(sciwin), 'readout window')
        trimage.meta['imtype'] = ('mbias', 'windowed master bias')
        trimage.write(path + imname, overwrite=True)
    for image, imname in mastercollection.ccds(imtype='subflat', return_fname=True):
        trimage = ccdp.trim_image(image, fits_section=str(sciwin))
        trimage.meta['trimwind'] = (str(sciwin), 'readout window')
        trimage.meta['imtype'] = ('mflat', 'windowed master flat')
        trimage.write(path + imname, overwrite=True)
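# Minimal usage sketch for crop_masters, assuming 'Master_Files' already contains the
# trimmed master bias (imtype 'trimmed bias') and bias-subtracted master flat
# (imtype 'subflat') produced by the earlier snippets.
crop_masters()  # writes windowed copies into cmpath ('Master_Files/Windowed/')
windowed = ImageFileCollection(cmpath)
print('windowed masters:', windowed.files_filtered(imtype='mbias'),
      windowed.files_filtered(imtype='mflat'))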
def t120_makecosmetic(work_dir=t120.t120_data_path,
                      orig_dir_root=t120.t120_orig_dir,
                      reduc_dir_root=t120.t120_redu_dir,
                      offset_dir_root=t120.t120_ofst_dir,
                      flat_dir_root=t120.t120_flat_dir,
                      dark_dir_root=t120.t120_dark_dir,
                      common_dir_root=t120.t120_common_root,
                      scampahead_file=t120.t120_scamp_ahead,
                      master_offset_name=t120.t120_master_name):
    orig_dir = work_dir + orig_dir_root
    reduc_dir = work_dir + reduc_dir_root
    offset_dir = work_dir + offset_dir_root
    flat_dir = work_dir + flat_dir_root
    dark_dir = work_dir + dark_dir_root
    master_offset_file = offset_dir + t120.t120_master_name
    # now check existence of useful files and directories
    for subdir in [work_dir, orig_dir, offset_dir, flat_dir, t120.t120_common_dir]:
        if not os.path.isdir(subdir):
            msg = '*** FATAL ERROR: directory ' + subdir + ' does not exist'
            t120.log.error(msg)
            raise IOError(msg)
    try:
        master_offset = fits_ccddata_reader(master_offset_file)
    except:
        msg = '*** FATAL ERROR while reading ' + master_offset_file
        t120.log.error(msg)
        raise IOError(msg)
    # read scamp ahead file
    #scamp_header = read_scamp_ahead(scampahead_file)
    # loop over images
    listremove = []
    listimg = ImageFileCollection(orig_dir + '/')  # ,glob_include='*-c.fits',glob_exclude='*.fit')
    for ccd, fit_file in listimg.ccds(ccd_kwargs={'unit': 'adu'}, return_fname=True,
                                      save_location=reduc_dir):  # ,save_with_name='-c',save_location=reduc_dir+'/'):
        t120.log.info('now treating file: ' + orig_dir + fit_file)
        filter_name = ccd.header['FILTER']
        flat_name = flat_dir + '/master-' + filter_name + '.fits'
        master_flat = fits_ccddata_reader(flat_name, unit=u.adu)
        hdu = fits.open(orig_dir + fit_file)
        exp_time = hdu[0].header['EXPTIME']
        strexptime = "%3.1f" % exp_time
        t120.log.info('exp_time=' + strexptime)
        dark_name = dark_dir + t120.t120_master_name.replace('.fits', '') + '-' + strexptime + '.fits'
        master_dark = fits_ccddata_reader(dark_name, unit=u.adu)
        t120.log.info('Flat: ' + flat_name)
        t120.log.info('Dark: ' + dark_name)
        master_dark.header['EXPOSURE'] = ccd.header['EXPOSURE']  # for dark subtraction
        master_offset.header['EXPOSURE'] = ccd.header['EXPOSURE']  # for dark subtraction
        ccd_corr = ccd_process(ccd, exposure_key='EXPOSURE', exposure_unit=u.second,
                               dark_frame=master_dark, master_flat=master_flat)
        #ccd_corr.header = put_scamp_header(ccd_corr.header,scamp_header)
        # update header
        skycoo = SkyCoord(ccd_corr.header['OBJCTRA'] + ' ' + ccd_corr.header['OBJCTDEC'],
                          unit=(u.hourangle, u.deg))
        ccd_corr.header['CRVAL1'] = (skycoo.ra.to('deg').value,
                                     'Reference Right ascension in decimal deg')
        ccd_corr.header['CRVAL2'] = (skycoo.dec.to('deg').value,
                                     'Reference Declination in decimal deg')
        out_fit_file = reduc_dir + '/' + os.path.splitext(fit_file)[0] + '-c.fits'
        if os.path.exists(out_fit_file):
            os.system('rm ' + out_fit_file)
            t120.log.info('File ' + out_fit_file + ' has been removed')
        # make primary HDU
        hducorrlist = ccd_corr.to_hdu()
        ccd_tosave = CCDData(hducorrlist[0].data, unit=u.adu)
        ccd_tosave.header = hducorrlist[0].header
        fits_ccddata_writer(ccd_tosave, out_fit_file)
        t120.log.info('corrected image saved in ' + out_fit_file)
        copy_fit_file = reduc_dir + fit_file
        t120.log.info('copy_fit_file ' + copy_fit_file)
        listremove.append(copy_fit_file)
    # remove copy of original fits files
    for file2remove in listremove:
        t120.log.info('now removing file ' + file2remove)
        if os.path.exists(file2remove):
            os.system('rm ' + file2remove)
            t120.log.info('File ' + file2remove + ' has been removed')
        else:
            t120.log.info('File ' + file2remove + ' does not exist')
    return
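# Minimal usage sketch for t120_makecosmetic, assuming the t120 configuration module
# points at an observation tree that already contains the master offset, per-filter
# master flats and per-exposure master darks expected above. The explicit work_dir
# shown in the comment is a hypothetical example path.
t120_makecosmetic()  # uses the default t120.* directories
# t120_makecosmetic(work_dir='/data/T120/2019-05-12/')  # or point at a specific night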
def photometry_on_directory(directory_with_images, object_of_interest,
                            star_locs, aperture_rad,
                            inner_annulus, outer_annulus,
                            max_adu, star_ids,
                            camera,
                            bjd_coords=None,
                            observatory_location=None,
                            fwhm_by_fit=True):
    """
    Perform aperture photometry on a directory of images.

    Parameters
    ----------

    directory_with_images : str
        Folder containing the images on which to do photometry. Photometry
        will only be done on images that contain the ``object_of_interest``.

    object_of_interest : str
        Name of the object of interest. The only files on which photometry
        will be done are those whose header contains the keyword ``OBJECT``
        whose value is ``object_of_interest``.

    star_locs : tuple of numpy array
        The first entry in the tuple should be the right ascension of the
        sources, in degrees. The second should be the declination of the
        sources, in degrees.

    aperture_rad : int
        Radius of the aperture to use when performing photometry.

    inner_annulus : int
        Inner radius of the annulus to use for performing local sky
        subtraction.

    outer_annulus : int
        Outer radius of the annulus to use for performing local sky
        subtraction.

    max_adu : int
        Maximum allowed pixel value before a source is considered saturated.

    star_ids : array-like
        Unique identifier for each source in ``star_locs``.

    camera : `stellarphot.Camera` object
        Camera object which has gain, read noise and dark current set. The
        gain (electrons/ADU) is used in calculating the instrumental
        magnitude; the read noise (electrons) and dark current
        (electrons/sec) are used in the CCD equation to calculate the error.
    """
    ifc = ImageFileCollection(directory_with_images)
    phots = []
    missing_stars = []
    for a_ccd, fname in ifc.ccds(object=object_of_interest, return_fname=True):
        print('on image ', fname)
        try:
            # Convert RA/Dec to pixel coordinates for this image
            pix_coords = a_ccd.wcs.all_world2pix(star_locs[0], star_locs[1], 0)
        except AttributeError:
            print(' ....SKIPPING THIS IMAGE, NO WCS')
            continue
        xs, ys = pix_coords

        # Remove anything that is too close to the edges/out of frame
        padding = 3 * aperture_rad
        out_of_bounds = ((xs < padding) | (xs > (a_ccd.shape[1] - padding)) |
                         (ys < padding) | (ys > (a_ccd.shape[0] - padding)))
        in_bounds = ~out_of_bounds

        # Find centroids of each region around star that is in_bounds
        xs_in = xs[in_bounds]
        ys_in = ys[in_bounds]
        print(' ...finding centroids')
        try:
            xcen, ycen = centroid_sources(a_ccd.data, xs_in, ys_in,
                                          box_size=2 * aperture_rad + 1)
        except NoOverlapError:
            print(' ....SKIPPING THIS IMAGE, CENTROID FAILED')
            continue

        # Calculate offset between centroid in this image and the positions
        # based on input RA/Dec. Later we will set the magnitude of those with
        # large differences to an invalid value (maybe).
        center_diff = np.sqrt((xs_in - xcen)**2 + (ys_in - ycen)**2)

        # FWHM is typically 5-6 pixels. The center really shouldn't move
        # by more than that.
        too_much_shift = center_diff > 6
        xcen[too_much_shift] = xs_in[too_much_shift]
        ycen[too_much_shift] = ys_in[too_much_shift]

        # Set up apertures and annuli based on the centroids in this image.
        ap_locs = np.array([xcen, ycen]).T
        aps = CircularAperture(ap_locs, r=aperture_rad)
        anuls = CircularAnnulus(ap_locs, inner_annulus, outer_annulus)

        # Set any clearly bad values to NaN
        a_ccd.data[a_ccd.data > max_adu] = np.nan
        print(' ...doing photometry')
        # Do the photometry...
        pho = aperture_photometry(a_ccd.data, (aps, anuls),
                                  mask=a_ccd.mask, method='center')

        # We may have some stars we did not do photometry for because
        # those stars were out of bounds.
        # Add the ones we missed to the list of missing
        missed = star_ids[out_of_bounds]
        missing_stars.append(missed)

        # Add all the extra goodies to the table
        print(' ...adding extra columns')
        add_to_photometry_table(pho, a_ccd, anuls, aps,
                                fname=fname, star_ids=star_ids[in_bounds],
                                camera=camera,
                                bjd_coords=bjd_coords,
                                observatory_location=observatory_location,
                                fwhm_by_fit=fwhm_by_fit)

        # And add the final table to the list of tables
        phots.append(pho)

    # ### Combine all of the individual photometry tables into one
    all_phot = vstack(phots)

    # ### Eliminate any stars that are missing from one or more images
    #
    # This makes life a little easier later...
    uniques = set()
    for miss in missing_stars:
        uniques.update(set(miss))

    actually_bad = sorted([u for u in uniques if u in all_phot['star_id']])

    all_phot.add_index('star_id')
    if actually_bad:
        bad_rows = all_phot.loc_indices[actually_bad]
        try:
            bad_rows = list(bad_rows)
        except TypeError:
            bad_rows = [bad_rows]
        all_phot.remove_indices('star_id')
        all_phot.remove_rows(sorted(bad_rows))

    all_phot.remove_indices('star_id')

    gain = camera.gain
    noise = calculate_noise(gain=camera.gain,
                            read_noise=camera.read_noise,
                            dark_current_per_sec=camera.dark_current,
                            flux=all_phot['aperture_net_flux'],
                            sky_per_pix=all_phot['sky_per_pix_avg'].value,
                            aperture_area=all_phot['aperture_area'],
                            annulus_area=all_phot['annulus_area'],
                            exposure=all_phot['exposure'].value,
                            include_digitization=False)

    snr = gain * all_phot['aperture_net_flux'] / noise
    all_phot['mag_error'] = 1.085736205 / snr
    all_phot['noise'] = noise
    # AstroImageJ includes a factor of gain in the noise. IMHO it is part of the
    # flux but, for convenience, here it is
    all_phot['noise-aij'] = noise / gain
    all_phot['snr'] = snr

    return all_phot
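# Hypothetical usage sketch for photometry_on_directory. The directory, object name,
# coordinates, identifiers and noise figures below are placeholders, not values from
# the original code. A real stellarphot Camera object would normally be passed; a
# SimpleNamespace stands in here only because the function reads just the .gain,
# .read_noise and .dark_current attributes.
import numpy as np
from types import SimpleNamespace

camera = SimpleNamespace(gain=1.5, read_noise=10.0, dark_current=0.01)  # placeholder values
ra = np.array([185.0123, 185.0456])    # degrees, hypothetical sources
dec = np.array([28.1201, 28.1337])
star_ids = np.array([1, 2])
phot_table = photometry_on_directory('reduced_night1/', 'kelt-16b',
                                     (ra, dec), aperture_rad=10,
                                     inner_annulus=15, outer_annulus=25,
                                     max_adu=50000, star_ids=star_ids,
                                     camera=camera)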