def covert_3dto2d(self, files):
    files = filenames2list(files, forcelist=True)
    for file in files:
        d, h = fits.getdata(file, header=True)
        d = d[0, :, :]
        h.remove('naxis3')
        fits.update(file, d, header=h)
def bias_file_by_file(fname, biasname, datahdus=0):
    hdulist = fits.open(fname)
    hdubias = fits.open(biasname)
    nhdus = len(hdulist)
    if nhdus > 1:
        istart = 1
    else:
        istart = 0
    hduindexes = list(range(nhdus))[istart:]
    if datahdus != 0:
        hduindexes = datahdus
    for i in hduindexes:
        data1 = ccdproc.CCDData(hdulist[i].data, unit="adu")
        data1.header = hdulist[i].header
        bias1 = ccdproc.CCDData(hdubias[i].data, unit="adu")
        bias1.header = hdubias[i].header
        commentstr = "Bias image is " + biasname
        # proc1 = ccdproc.subtract_bias(data1, bias1, add_keyword={'bias': True, 'calstat': 'OTZ', 'history': commentstr})
        proc1 = ccdproc.subtract_bias(data1, bias1,
                                      add_keyword={'bias': True,
                                                   'calstat': 'OTZ'})
        fits.update(fname, proc1.data, header=proc1.header, ext=i)
        # fits.update(fname, proc1.data, ext=i)
    hdulist.close()
    hdubias.close()
    mylog("Bias corrected {0} with {1}".format(fname, biasname))
    return
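# A minimal usage sketch for bias_file_by_file above; the file names are
# placeholders and both files must share the same HDU layout.
bias_file_by_file('science_001.fits', 'masterbias.fits')
# or restrict the correction to specific data HDUs:
bias_file_by_file('science_002.fits', 'masterbias.fits', datahdus=[1, 2])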
def fitsarith(input1, input2, output, operation, const1=None, const2=None):
    # read input files - must have same extension structure!
    hdu1 = fits.open(input1)
    hdu2 = fits.open(input2)
    # build output file by copying input1
    shutil.copyfile(input1, output)
    # loop through extensions, find image extensions only, and do arithmetic
    for i in range(len(hdu1)):
        exten1 = hdu1[i]
        exten2 = hdu2[i]
        if not isinstance(exten1, (fits.hdu.image.ImageHDU,
                                   fits.hdu.compressed.CompImageHDU)):
            continue
        if const1 is None:
            data1 = exten1.data
        else:
            data1 = const1 * exten1.data
        if const2 is None:
            data2 = exten2.data
        else:
            data2 = const2 * exten2.data
        if operation == "+":
            data_output = data1 + data2
        elif operation == "-":
            data_output = data1 - data2
        elif operation == "*":
            data_output = data1 * data2
        elif operation == "/":
            data_output = data1 / data2
        else:
            raise ValueError("Unknown operator %s" % operation)
        # update this extension in output fits file
        fits.update(output, data_output, i, header=exten1.header)
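# Hedged example for the two-file fitsarith above: subtract a half-scaled
# dark from a science frame. The file names and the 0.5 factor are
# illustrative; both inputs must share the same extension structure.
fitsarith('science.fits', 'dark.fits', 'science_darksub.fits', '-', const2=0.5)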
def cand_list_operation(filename, det_tab=None, mode='save'):
    """
    Create CANDIDATES_LIST as an extension table of the difference-image FITS.

    Parameters
    ----------
    filename: str
        filename of the difference image
    det_tab: pd.DataFrame / None
        1. DETECTION_TABLE or CANDIDATES_LIST DataFrame
        2. None if using mode='load'
    mode: str ['save', 'load', 'update']
    """
    if mode == 'save':
        # create astropy table
        m = Table(det_tab.values, names=det_tab.columns)
        hdu = fits.table_to_hdu(m)
        # add extension table on top of the difference image
        with fits.open(filename, mode='update') as hdul0:
            hdul0.append(hdu)
            hdul0[-1].header['EXTNAME'] = 'CANDIDATES_LIST'
            hdul0.flush()
    elif mode == 'load':
        det_tab = getdata(filename, 'CANDIDATES_LIST')
        det_tab = pd.DataFrame(np.array(det_tab).byteswap().newbyteorder())
        return det_tab
    elif mode == 'update':
        m = Table(det_tab.values, names=det_tab.columns)
        hdr = getheader(filename, extname='CANDIDATES_LIST')
        update(filename, np.array(m), extname='CANDIDATES_LIST', header=hdr)
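# A hedged round trip with cand_list_operation above. The difference-image
# filename and the DataFrame columns are placeholders, and the FITS file is
# assumed to already exist so it can be opened in update mode.
import pandas as pd
cands = pd.DataFrame({'X_IMAGE': [10.5], 'Y_IMAGE': [20.1], 'real_bogus': [0.9]})
cand_list_operation('diff_image.fits', det_tab=cands, mode='save')
loaded = cand_list_operation('diff_image.fits', mode='load')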
def run_sector_camera_chip(base_dir, output_dir, sector, camera, chip):
    pattern = os.path.join(base_dir, "tess", "ffi", "s{0:04d}".format(sector),
                           "*", "*", "{0:d}-{1:d}".format(camera, chip),
                           "*.fits")
    outdir = os.path.join(output_dir, "s{0:04d}".format(sector),
                          "{0:d}-{1:d}".format(camera, chip))
    os.makedirs(outdir, exist_ok=True)
    open(os.path.join(outdir, "index.auto"), "w").close()
    open(os.path.join(os.path.dirname(outdir), "index.auto"), "w").close()
    fns = list(sorted(glob.glob(pattern)))
    postcard_fns = make_postcards(fns, outdir)
    # Ensures no postcards have been repeated
    postcard_fns = np.unique(postcard_fns)
    # Writes in the background after making the postcards
    print("Computing backgrounds...")
    for fn in tqdm.tqdm(postcard_fns, total=len(postcard_fns)):
        with fits.open(fn) as hdu:
            bkg = calc_2dbkg(hdu[2].data, hdu[1].data['QUALITY'],
                             hdu[1].data['TSTART'])
            # Checks to make sure there isn't a background extension already
            if len(hdu) < 5:
                fits.append(fn, bkg)
            else:
                fits.update(fn, bkg, 4)
def fitsarith(inputlist, output, expression):
    if not inputlist:
        raise ValueError("Input is empty")
    hdus_in = [fits.open(fin) for fin in inputlist]
    use_expr = ""
    use_expr += expression
    n_in = len(hdus_in)
    # Replace in reverse index order in case someone wants 10 or more images
    for i in range(n_in - 1, -1, -1):
        from_string = "@%i" % i
        to_string = "h[%i]" % i
        use_expr = use_expr.replace(from_string, to_string)
    n_hdu = len(hdus_in[0])
    shutil.copyfile(inputlist[0], output)
    for j in range(n_hdu):
        if not isinstance(hdus_in[0][j], (fits.hdu.image.ImageHDU,
                                          fits.hdu.compressed.CompImageHDU)):
            continue
        h = [hdus[j].data for hdus in hdus_in]
        data_output = eval(use_expr)
        fits.update(output, data_output, j, header=hdus_in[0][j].header)
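# Hedged usage of the expression-based fitsarith above: "@i" tokens are
# replaced (in reverse index order, so "@1" cannot clobber "@10") with the
# data of the i-th input before eval(). File names are placeholders.
fitsarith(['a.fits', 'b.fits'], 'out.fits', '@0 - 0.5 * @1')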
def flat_file_by_file(fname, flatname, datahdus=0):
    hdulist = fits.open(fname)
    hduflat = fits.open(flatname)
    nhdus = len(hdulist)
    if nhdus > 1:
        istart = 1
    else:
        istart = 0
    hduindexes = list(range(nhdus))[istart:]
    if datahdus != 0:
        hduindexes = datahdus
    for i in hduindexes:
        data1 = ccdproc.CCDData(hdulist[i].data, unit="adu")
        data1.header = hdulist[i].header
        flat1 = ccdproc.CCDData(hduflat[i].data, unit="adu")
        flat1.header = hduflat[i].header
        if i == 1:
            flatscale = np.mean(flat1)
        # flat1 = flat1/flatscale
        commentstr = "Flat image is " + flatname + " with scale " + str(flatscale)
        proc1 = ccdproc.flat_correct(data1, flat1,
                                     add_keyword={'flat': True,
                                                  'calstat': 'OTZF',
                                                  'history': commentstr})
        fits.update(fname, proc1.data, header=proc1.header, ext=i)
        # fits.update(fname, proc1.data, ext=i)
    hdulist.close()
    hduflat.close()
    mylog("Flat corrected {0} with {1}".format(fname, flatname))
    return
def create_data(imdir, rot, ov):
    """
    imdir: directory for simulated fits image data
    rot: pupil rotation in degrees
    ov: oversample for simulation

    Writes sim data to fitsimdir
    """
    npix = 81
    wave = 4.3e-6  # SI
    fnfmt = '/psf_nrm_{2:.1f}_{0}_{1}_rot{3:.3f}d.fits'
    # expects strings of %(npix, holeshape, wave/um, rot_d)
    rot = utils.avoidhexsingularity(rot)  # in utils
    affine_rot = utils.Affine2d(rotradccw=np.pi * rot / 180.0,
                                name="rot{0:.3f}d".format(rot))  # in utils
    jw = NRM_Model(mask='jwst', holeshape='hex', affine2d=affine_rot)
    jw.set_pixelscale(PIXELSCALE_r)
    jw.simulate(fov=81, bandpass=MONOF430M, over=ov)
    psffn = fnfmt.format(npix, 'hex', wave / um, rot)
    fits.writeto(imdir + psffn, jw.psf, overwrite=True)
    header = fits.getheader(imdir + psffn)
    header = utils.affinepars2header(header, affine_rot)
    fits.update(imdir + psffn, jw.psf, header=header)
    del jw
    return psffn  # filename only, not full path
def generate_sec_offsets_new(name):
    print(name)
    hdulist = pyfits.open('../AIS_GAL_SCAN/asprta/%s-asprta.fits' % name)
    initial = 0
    final = hdulist[1].data['T'].shape[0] - 1
    centers = []
    center_time = []
    for i in range(initial, final + 1):
        #center = np.load('../data/%s/cata/centroids_rot%d.npy'%(name, i))
        #if center.shape!=(2,3):
        #    print i, center.shape
        centers.append(np.load('../data/%s/cata/centroids_rot%d.npy' % (name, i)))
        center_time.append(np.load('../data/%s/cata/time_rot%d.npy' % (name, i)))
    centroids = np.concatenate(centers, axis=0)
    time = np.concatenate(center_time, axis=0)
    print(centroids.shape)
    print(time.shape)
    output = '../plots/%s/cata/offsets_10_new_sec.pdf' % name
    dir = os.path.dirname(output)
    if not os.path.exists(dir):
        os.makedirs(dir)
    plt.plot(centroids[:, 0], '.b')
    plt.savefig('../plots/%s/cata/offsets_10_new_sec.pdf' % name, dpi=190)
    plt.clf()
    np.save('../data/%s/cata/offsets%d_10_new_sec.npy' % (name, initial), centroids)
    np.save('../data/%s/cata/time%d_10_new_sec.npy' % (name, initial), time)
    co_data = hdulist[1].data
    T = co_data['T']
    ra = co_data['ra']
    dec = co_data['dec']
    roll = co_data['roll']
    ra_new = np.interp(time, T, ra) - centroids[:, 0]
    dec_new = np.interp(time, T, dec) - centroids[:, 1]
    roll_new = np.interp(time, T, roll) - centroids[:, 2]
    other = np.zeros((time.shape[0], 8))
    array = np.concatenate([np.array([time, ra_new, dec_new, roll_new]).T,
                            other], axis=1)
    data = np.core.records.fromarrays(
        array.transpose(),
        dtype=[('T', float), ('RA', float), ('DEC', float), ('ROLL', float),
               ('STATUS_FLAG', int), ('ROLL_RAD', float), ('X', float),
               ('Y', float), ('Z', float), ('XDOT', float), ('YDOT', float),
               ('ZDOT', float)])
    new_file = '../AIS_GAL_SCAN/asprta/%s-sec-asprta.fits' % name
    os.system('cp ../AIS_GAL_SCAN/asprta/%s-asprta.fits '
              '../AIS_GAL_SCAN/asprta/%s-sec-asprta.fits' % (name, name))
    update(new_file, data, 1)
    hdu = pyfits.open(new_file)
    print(hdu[1].data['RA'].shape)
    print(hdu[1].data['DEC'].shape)
    hdu.close()
    tmp_files = glob.glob("../data/%s/cata/centroids_rot*" % name)
    for tmp_file in tmp_files:
        os.remove(tmp_file)
    tmp_files = glob.glob("../data/%s/cata/time_rot*" % name)
    for tmp_file in tmp_files:
        os.remove(tmp_file)
def update_fits(self, header_only=False):
    if header_only:
        hdu = fits.open(self.fitsfile, mode="update")
        hdu[0].header = self.header
        hdu.flush()
        hdu.close()
        return
    fits.update(self.fitsfile, self.data_array, self.header)
def find_rotation(imagedata, rotdegs,
                  mx, my, sx, sy, xo, yo,  # for Affine2d
                  pixel, npix, bandpass, over, holeshape,
                  outdir=None):  # for nrm_model
    """
    AS AZG 2018 08 Ann Arbor
    Develop the rotation loop first
    """
    vprint("Before Loop: ", rotdegs)
    # Extend this name to include multi-parameter searches?
    psffmt = 'psf_nrm_{0:d}_{1:s}_{2:.3f}um_r{3:.3f}deg.fits'
    # expect (npix, holeshape, bandpass/um, scl)
    if hasattr(rotdegs, '__iter__') is False:
        rotdegs = (rotdegs,)
    affine2d_list = create_afflist_rot(rotdegs, mx, my, sx, sy, xo, yo)
    crosscorr_rots = []
    for (rot, aff) in zip(rotdegs, affine2d_list):
        vprint(aff.name + "...")
        jw = NRM_Model(mask='jwst', holeshape=holeshape, over=over,
                       affine2d=aff)
        jw.set_pixelscale(pixel)
        jw.simulate(fov=npix, bandpass=bandpass, over=over)
        psffn = psffmt.format(npix, holeshape, bandpass / um, rot)
        if outdir:
            fits.PrimaryHDU(data=jw.psf).writeto(outdir + "/" + psffn,
                                                 overwrite=True)
            fits.writeto(psffn, jw.psf, overwrite=True)
            header = fits.getheader(psffn)
            utils.affinepars2header(header, aff)
            fits.update(psffn, jw.psf, header=header)
        crosscorr_rots.append(utils.rcrosscorrelate(imagedata, jw.psf).max())
        del jw
    vprint("Debug: ", crosscorr_rots, rotdegs)
    rot_measured_d, max_cor = utils.findpeak_1d(crosscorr_rots, rotdegs)
    vprint("Rotation measured: max correlation {1:.3e}", rot_measured_d, max_cor)
    # return convenient affine2d
    return utils.Affine2d(rotradccw=np.pi * rot_measured_d / 180.0,
                          name="{0:.4f}".format(rot_measured_d))
def setUp(self):
    # directory containing the test data
    data_dir = os.path.join(os.path.dirname(__file__),
                            'test_data/affine2d_rot_psf')
    self.data_dir = data_dir
    print(data_dir)
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)
    self.fnfmt = data_dir + '/psf_nrm_{2:.1f}_{0}_{1}_{3:.0f}.fits'
    # expects strings of %(imsize, hole)
    self.fmt = " ({0:+.3f}, {1:+.3f}) -> ({2:+.3f}, {3:+.3f})"
    self.pixel = 0.0656 * u.arcsec.to(u.rad)
    self.npix = 87
    self.wave = 4.3e-6  # m
    self.over = 1
    mx, my = 1.0, 1.0
    sx, sy = 0.0, 0.0
    xo, yo = 0.0, 0.0
    affine_ideal = Affine2d(mx=mx, my=my, sx=sx, sy=sy, xo=xo, yo=yo,
                            name="Ideal")
    for rot in (0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50,
                55, 60, 65, 70, 75, 80, 85, 90, 95):
        """
        print(" rot degrees pre", rot, end='')
        diagnostic = rot/15.0 - int(rot/15.0)
        print(" diagnostic", diagnostic, end='')
        rot = avoidhexsingularity(rot)  # in utils
        print(" rot degrees post", rot)
        """
        rot = avoidhexsingularity(rot)  # in utils
        affine_rot = Affine2d(rotradccw=np.pi * rot / 180.0,
                              name="{0:.0f}".format(rot))  # in utils
        aff = affine_rot
        # hexonly g7s6 jwst
        self.jw = NRM_Model(mask='jwst', holeshape="hexonly", affine2d=aff)
        self.jw.set_pixelscale(self.pixel * arcsec2rad)
        self.jw.simulate(fov=self.npix, bandpass=self.wave, over=self.over)
        psffn = self.fnfmt.format(self.npix, 'hexonly', self.wave / um, rot)
        fits.writeto(psffn, self.jw.psf, overwrite=True)
        header = fits.getheader(psffn)
        header = affinepars2header(header, aff)
        fits.update(psffn, self.jw.psf, header=header)
    del self.jw
    del aff
    del affine_rot
def moving_wcs_fix(files, ref=None):
    """Correct IRS FITS WCS for the motion of the targeted moving object.

    Parameters
    ----------
    files : array of strings
      A list of files to update.  The files are updated in place.
    ref : tuple
      The "reference" RA and Dec of the target expressed as a tuple:
      `(ra_ref, dec_ref)`.  This is usually the position of the moving
      target at the start of the IRS observation.  The difference between
      ra_ref, dec_ref and the RA_REF, DEC_REF in the FITS headers is the
      motion of the target.  Set ref to `None` to use RA_REF and DEC_REF
      from the first file in the file list as the initial position.
      [units: degrees]

    """
    from astropy.io import fits
    from ..util import spherical_coord_rotate
    assert np.iterable(files), "files must be an array of file names"
    if ref is None:
        # use RA_REF/DEC_REF from the first file, as documented above
        h0 = fits.getheader(files[0])
        ref = (h0["RA_REF"], h0["DEC_REF"])
    ra_ref0, dec_ref0 = ref
    for f in files:
        im, h = fits.getdata(f, header=True)
        ra_ref1 = h["RA_REF"]
        dec_ref1 = h["DEC_REF"]
        # I found CRVALx missing in some LH files
        if h.get("CRVAL1") is not None:
            crval1, crval2 = spherical_coord_rotate(
                ra_ref1, dec_ref1, ra_ref0, dec_ref0, h["CRVAL1"], h["CRVAL2"])
        rarqst, decrqst = spherical_coord_rotate(
            ra_ref1, dec_ref1, ra_ref0, dec_ref0, h["RA_RQST"], h["DEC_RQST"])
        raslt, decslt = spherical_coord_rotate(
            ra_ref1, dec_ref1, ra_ref0, dec_ref0, h["RA_SLT"], h["DEC_SLT"])
        print("{} moved {:.3f} {:.3f}".format(f, (ra_ref1 - ra_ref0) * 3600.,
                                              (dec_ref1 - dec_ref0) * 3600.))
        if h.get("CRVAL1") is not None:
            h["CRVAL1"] = crval1
            h["CRVAL2"] = crval2
        h["RA_RQST"] = rarqst
        h["RA_SLT"] = raslt
        h["DEC_RQST"] = decrqst
        h["DEC_SLT"] = decslt
        h.add_history("WCS updated for moving target motion with"
                      " mskpy.instruments.spitzer.moving_wcs_fix")
        fits.update(f, im, h)
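# Hypothetical invocation of moving_wcs_fix above; the glob pattern and the
# reference coordinates (degrees) are placeholders.
import glob
files = sorted(glob.glob('r12345678/ch0/bcd/*_bcd.fits'))
moving_wcs_fix(files, ref=(204.972, -10.561))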
def generate_new_offsets_new(name, asprta, suffix, tmp_dir, num_p):
    print(name)
    try:
        centroids = np.load(tmp_dir + '/offsets_%s.npy' % (suffix))
        time = np.load(tmp_dir + '/time_%s.npy' % (suffix))
    except IOError:
        try:
            centroids = np.load(tmp_dir + '/offsets0_%s.npy' % (suffix))
            time = np.load(tmp_dir + '/time0_%s.npy' % (suffix))
        except IOError:
            print('no file')
            return 0
    print(centroids.shape)
    print(time.shape)
    output = '../plots/1/%s-%s.pdf' % (name, suffix)
    dir = os.path.dirname(output)
    if not os.path.exists(dir):
        os.makedirs(dir)
    plt.plot(centroids[:, 0], '.k')
    plt.savefig('../plots/1/%s-%s_ra.pdf' % (name, suffix), dpi=190)
    plt.clf()
    plt.plot(centroids[:, 1], '.k')
    plt.savefig('../plots/1/%s-%s_dec.pdf' % (name, suffix), dpi=190)
    plt.clf()
    #np.save(tmp_dir+'/offsets_%s.npy'%(suffix), centroids)
    #np.save(tmp_dir+'/time_%s.npy'%(suffix), time)
    hdulist = pyfits.open(asprta)
    co_data = hdulist[1].data
    T = co_data['T']
    ra = co_data['ra']
    dec = co_data['dec']
    roll = co_data['roll']
    ra_new = np.interp(time, T, ra) - centroids[:, 0]
    dec_new = np.interp(time, T, dec) - centroids[:, 1]
    roll_new = np.interp(time, T, roll) - centroids[:, 2]
    other = np.zeros((time.shape[0], 8))
    array = np.concatenate([np.array([time, ra_new, dec_new, roll_new]).T,
                            other], axis=1)
    data = np.core.records.fromarrays(
        array.transpose(),
        dtype=[('T', float), ('RA', float), ('DEC', float), ('ROLL', float),
               ('STATUS_FLAG', int), ('ROLL_RAD', float), ('X', float),
               ('Y', float), ('Z', float), ('XDOT', float), ('YDOT', float),
               ('ZDOT', float)])
    new_file = re.split('-asprta.fits', asprta)[0] + '-' + suffix + '-asprta.fits'
    os.system('cp {0} {1}'.format(asprta, new_file))
    update(new_file, data, 1)
    hdu = pyfits.open(new_file)
    print(hdu[1].data['RA'].shape)
    print(hdu[1].data['DEC'].shape)
    hdu.close()
def predict_all(self, filename,
                extname={'image': 'IMAGE', 'table': 'DETECTION_TABLE'}):
    det_tab = pd.DataFrame(
        np.array(getdata(filename, extname['table'])).byteswap().newbyteorder())
    # clean data with inf or NaN
    det_tab = det_tab.replace([np.inf, -np.inf], np.nan)
    det_tab.dropna(inplace=True)
    det_tab.reset_index(drop=True, inplace=True)
    # load image
    pix_val = getdata(filename, extname['image'])
    # create stamp DataFrame
    pixel_col = ['p' + str(i + 1) for i in np.arange(441)]
    stamps = [Cutout2D(pix_val,
                       (det_tab.iloc[i]['X_IMAGE'] - 1,
                        det_tab.iloc[i]['Y_IMAGE'] - 1),
                       (21, 21), mode='partial').data.reshape(441)
              for i in np.arange(det_tab.shape[0])]
    stamps = pd.DataFrame(stamps, columns=pixel_col)
    # scale detection stamps
    X = scaling(stamps)
    X.replace([np.inf, -np.inf], np.nan, inplace=True)
    X.dropna(inplace=True)
    det_tab = det_tab.loc[X.index.tolist()]
    if self.algorithm == 'Convolutional_Neural_Network':
        X = X.values.reshape(-1, 21, 21, 1)
    if self.algorithm == 'PCA_Random_Forest':
        X = self.pca.transform(X)
        X = self.kbest.transform(X)
    # make predictions
    if (self.algorithm == 'Random_Forest') or (
            self.algorithm == 'PCA_Random_Forest'):
        pred = self.model.predict(X)
    elif (self.algorithm == 'Artificial_Neural_Network') or (
            self.algorithm == 'Convolutional_Neural_Network'):
        pred = self.model.predict(X)[:, 1]
    det_tab['real_bogus'] = pred
    # update the 'DETECTION_TABLE' including real-bogus score
    m = Table(det_tab.values, names=det_tab.columns)
    hdr = getheader(filename, extname='DETECTION_TABLE')
    update(filename, np.array(m), extname='DETECTION_TABLE', header=hdr)
def add_header(self, fromfile='telescope_obj1.fits',
               tofile='telescope_obj_p0.fits'):
    os.chdir('/home/dodkins/')
    hdu = pyfits.open(fromfile)
    hdr = hdu[0].header
    if fromfile == tofile:
        print(fromfile == tofile)
    hdu = pyfits.open(tofile, mode='update')
    scidata = hdu[0].data
    pyfits.update(tofile, scidata, hdr, 0)
    print(hdu[0].header)
    print('done')
def write_hduL(fitsfn, hduL):
    # If file doesn't exist, add the primary header
    hdu_primary = hduL[0]
    if os.path.exists(fitsfn) is False:
        fits.append(fitsfn, hdu_primary.data, header=hdu_primary.header)
    hduL = hduL[1:]
    for hdu in hduL:
        data = hdu.data
        extname = hdu.header['EXTNAME']
        header = hdu.header
        try:
            fits.update(fitsfn, data, extname, header=header)
        except KeyError:
            fits.append(fitsfn, data, header=header)
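# Sketch of calling write_hduL above with a freshly built HDUList; each
# extension HDU needs an EXTNAME for the update-or-append dispatch to work.
# The file name is a placeholder.
import numpy as np
from astropy.io import fits
hduL = fits.HDUList([fits.PrimaryHDU(),
                     fits.ImageHDU(np.zeros((4, 4)), name='SCI')])
write_hduL('stack.fits', hduL)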
def winbin_fits(image, win, bin, bintype, output=''):
    if output != '':
        output = filenames2list(output)
    else:
        output = filenames2list(image)
    for i, fn in enumerate(filenames2list(image, forcelist=True)):
        d, h = fits.getdata(fn, header=True)
        if win != 'full':
            d = d[win[2] - 1:win[3] + win[2] - 1,
                  win[0] - 1:win[0] + win[1] - 1]
        dc = bintype(d.reshape(d.shape[0], -1, bin[0]), 2)
        dr = bintype(dc.reshape(-1, bin[1], dc.shape[1]), 1)
        if (type(output) == str and output == image) or output[i] == fn:
            fits.update(fn, dr, header=h)
        elif type(output) == str:
            fits.writeto(output, dr, header=h)
        elif isinstance(output, (list, tuple)):
            fits.writeto(output[i], dr, header=h)
    return output
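# Hypothetical call to winbin_fits above: judging by the slicing, win is
# (x0, nx, y0, ny) in 1-based pixels, bin is (binx, biny), and bintype is any
# reducer taking an axis argument (e.g. np.mean); the window and bin sizes
# must tile the data evenly. The file names are placeholders.
import numpy as np
winbin_fits('raw.fits', (1, 100, 1, 200), (2, 2), np.mean, output='binned.fits')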
def oscan_trim_file(fname, datahdus=0):
    hdulist = fits.open(fname)
    nhdus = len(hdulist)
    if nhdus > 1:
        istart = 1
    else:
        istart = 0
    # loop from first-data to last HDU, unless datahdus is set
    hduindexes = list(range(nhdus))[istart:]
    if datahdus != 0:
        hduindexes = datahdus
    for i in hduindexes:
        hdulist = fits.open(fname)
        data1 = ccdproc.CCDData(hdulist[i].data, unit="adu")
        data1.header = hdulist[i].header
        # What happens if file is already overscan-subtracted?
        # We should probably default to using a model
        if modeling:
            oscan1 = ccdproc.subtract_overscan(
                data1, fits_section=data1.header['BIASSEC'],
                add_keyword={'overscan': True, 'calstat': 'O'},
                model=models.Polynomial1D(1))
        else:
            oscan1 = ccdproc.subtract_overscan(
                data1, fits_section=data1.header['BIASSEC'],
                add_keyword={'overscan': True, 'calstat': 'O'},
                model=None)
        trim1 = ccdproc.trim_image(
            oscan1, fits_section=oscan1.header['TRIMSEC'],
            add_keyword={'trimmed': True, 'calstat': 'OT'})
        fits.update(fname, trim1.data, header=trim1.header, ext=i)
    hdulist.close()
    mylog("Overscan and trim {0}".format(fname))
    return
def store_frame_to_fits(fits_address, fits_hdu, ext_name):
    if fits_address.is_file():
        try:
            fits.update(fits_address, data=fits_hdu.data,
                        header=fits_hdu.header, extname=ext_name, verify=True)
        except KeyError:
            fits.append(fits_address, data=fits_hdu.data,
                        header=fits_hdu.header, extname=ext_name)
    else:
        fits_hdu.writeto(fits_address, overwrite=True, output_verify='fix')
    return
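# Minimal sketch for store_frame_to_fits above: it expects a pathlib.Path
# (it calls .is_file()) plus any HDU carrying data and a header; the names
# here are placeholders. On the first call the file does not exist yet, so
# the HDU is written out directly.
from pathlib import Path
import numpy as np
from astropy.io import fits
store_frame_to_fits(Path('frames.fits'), fits.ImageHDU(np.ones((8, 8))), 'FLUX')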
def test_image_extension_update_header(self):
    """
    Test that _makehdu correctly includes the header. For example in the
    fits.update convenience function.
    """
    filename = self.temp('twoextension.fits')
    hdus = [fits.PrimaryHDU(np.zeros((10, 10))),
            fits.ImageHDU(np.zeros((10, 10)))]
    fits.HDUList(hdus).writeto(filename)
    fits.update(filename, np.zeros((10, 10)),
                header=fits.Header([('WHAT', 100)]), ext=1)
    h_out = fits.getheader(filename, ext=1)
    assert h_out['WHAT'] == 100
def oscan_trim_file(fname):
    hdulist = fits.open(fname)
    nhdus = len(hdulist)
    if nhdus > 1:
        istart = 1
    else:
        istart = 0
    # loop from first-data to last HDU.
    for i in range(nhdus)[istart:]:
        hdulist = fits.open(fname)
        data1 = ccdproc.CCDData(hdulist[i].data, unit="adu")
        data1.header = hdulist[i].header
        # What happens if file is already overscan-subtracted?
        if modeling:
            oscan1 = ccdproc.subtract_overscan(
                data1, fits_section=data1.header['BIASSEC'],
                add_keyword={'overscan': True, 'calstat': 'O'},
                model=models.Polynomial1D(1))
        else:
            oscan1 = ccdproc.subtract_overscan(
                data1, fits_section=data1.header['BIASSEC'],
                add_keyword={'overscan': True, 'calstat': 'O'},
                model=None)
        trim1 = ccdproc.trim_image(
            oscan1, fits_section=oscan1.header['TRIMSEC'],
            add_keyword={'trimmed': True, 'calstat': 'OT'})
        fits.update(fname, trim1.data, header=trim1.header, ext=i)
    hdulist.close()
    return
def run_sector_camera_chip(base_dir, sector, camera, chip):
    pattern = os.path.join(base_dir, "hlsp_eleanor_*.fits")
    outdir = base_dir
    open(os.path.join(outdir, "index.auto"), "w").close()
    open(os.path.join(os.path.dirname(outdir), "index.auto"), "w").close()
    fns = list(sorted(glob.glob(pattern)))
    # Ensures no postcards have been repeated
    fns = np.unique(fns)
    # Writes in the background after making the postcards
    print("Computing backgrounds...")
    for fn in tqdm.tqdm(fns, total=len(fns)):
        with fits.open(fn) as hdu:
            bkg = calc_2dbkg(hdu[2].data, hdu[1].data['QUALITY'],
                             hdu[1].data['TSTART'])
            # Checks to make sure there isn't a background extension already
            if len(hdu) < 5:
                fits.append(fn, bkg)
            else:
                fits.update(fn, bkg, 4)
def unbias(input_file, output_file, bias_method, bias_method_col=None,
           superbias_file=None):
    # build output file by copying input
    shutil.copyfile(input_file, output_file)
    ccd = get_ccd_from_id(None, input_file, [])
    hdulist = fits.open(input_file)
    if superbias_file is not None:
        superbias_ccd = get_ccd_from_id(None, superbias_file, [])
    else:
        superbias_ccd = None
    amps = get_amp_list(ccd)
    offset = get_amp_offset(ccd, superbias_ccd)
    for i, amp in enumerate(amps):
        regions = get_geom_regions(ccd, amp)
        serial_oscan = regions['serial_overscan']
        parallel_oscan = regions['parallel_overscan']
        img = get_raw_image(ccd, amp)
        if superbias_ccd is not None:
            superbias_im = get_raw_image(superbias_ccd, amp + offset)
        else:
            superbias_im = None
        image = unbias_amp(img, serial_oscan, bias_type=bias_method,
                           superbias_im=superbias_im,
                           bias_type_col=bias_method_col,
                           parallel_oscan=parallel_oscan)
        fits.update(output_file, image.image.array, amp,
                    header=hdulist[amp].header)
def renormalize_by_flat(image, flat, read_from_file=False, datahdus=0):
    if read_from_file == True:
        hduimage = fits.open(image)
        hduflat = fits.open(flat)
    else:
        hduimage = image
        hduflat = flat
    nhdus = len(hduimage)
    # Do nothing if image only has 0-1 data extension
    if nhdus <= 1:
        mylog("Don't need to renormalize a 1-extension flat, returning")
        return
    secmeans = np.zeros(nhdus - 1)
    hduindexes = list(range(nhdus))[1:]
    if datahdus != 0:
        hduindexes = datahdus
    for i in hduindexes:
        secmeans[i - 1] = hduflat[i].data.mean()
    totmean = secmeans.mean()
    secmeans = secmeans / totmean
    mylog("Renormalized flat means by extension: {0} ".format(secmeans))
    for i in hduindexes:
        #hduimage[i].data = hduimage[i].data / secmeans[i-1]
        hduimage[i].divide(secmeans[i - 1] * hduimage[i].unit)
        if read_from_file == True:
            fits.update(image, hduimage[i].data, header=hduimage[i].header,
                        ext=i)
    if read_from_file == True:
        hduimage.close()
        hduflat.close()
    else:
        # This may be unnecessary if these are already pointing at identical structure
        image = hduimage
    # done?
    return
def update_fits_header(files, modify={}, remove=()):
    files = filenames2list(files, forcelist=True)
    for file in files:
        data, head = fits.getdata(file, header=True)
        for key in modify:
            if not isinstance(modify[key], dict):
                head[key] = modify[key]
            else:
                func = modify[key].get('func', None)
                args = modify[key].get('args', None)
                if func and args is None:
                    head[key] = func()
                if func is None and isinstance(args, str):
                    head[key] = head[args]
                if func is not None and args is not None:
                    if type(args) == str:
                        head[key] = func(head[args])
                    else:
                        head[key] = func(*[head[arg] for arg in args])
        if remove:
            for key in remove:
                if key in head:
                    head.remove(key)
        fits.update(file, data, header=head)
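# Illustrative call to update_fits_header above, exercising two of the forms
# a modify entry can take (a plain value, and a func applied to other
# keywords); the file name and keyword names are placeholders.
update_fits_header('img.fits',
                   modify={'OBSERVER': 'unknown',
                           'MJD-MID': {'func': lambda a, b: 0.5 * (a + b),
                                       'args': ('MJD-BEG', 'MJD-END')}},
                   remove=('COMMENT',))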
def generate_new_offsets_new(name, asprta, suffix, tmp_dir, num_p):
    print(name)
    #centers = []
    #center_time = []
    '''
    hdulist = pyfits.open('../AIS_GAL_SCAN/asprta/%s-asprta.fits'%name)
    initial = 0
    final = hdulist[1].data['T'].shape[0]-1
    hdulist.close()
    for i in range(initial, final+1):
        #center = np.load('../data/%s/cata/centroids_rot%d.npy'%(name, i))
        #if center.shape!=(2,3):
        #    print i, center.shape
        centers.append(np.load(tmp_dir+'/centroids_rot%d.npy'%(i)))
        center_time.append(np.load(tmp_dir+'/time_rot%d.npy'%(i)))
    '''
    '''
    for i in range(num_p):
        centers.append(np.load(tmp_dir+'/centroids_tmp%d.npy'%(i)))
        center_time.append(np.load(tmp_dir+'/time_tmp%d.npy'%(i)))
    centroids = np.concatenate(centers, axis=0)
    time = np.concatenate(center_time, axis=0)
    '''
    try:
        centroids = np.load(tmp_dir + '/offsets_%s.npy' % (suffix))
        time = np.load(tmp_dir + '/time_%s.npy' % (suffix))
    except IOError:
        try:
            centroids = np.load(tmp_dir + '/offsets0_%s.npy' % (suffix))
            time = np.load(tmp_dir + '/time0_%s.npy' % (suffix))
        except IOError:
            print('no file')
            return 0
    scst_file = pyfits.open('/scratch/dw1519/galex/AIS_GAL_SCAN/scst/%s-scst.fits' % name)
    scst_data = scst_file[1].data
    scst_time = scst_data['pktime'].copy()
    hv = scst_data['hvnom_nuv'].copy()
    scst_file.close()
    scst_ix = np.digitize(time, scst_time) - 1
    ix_mask = scst_ix < scst_time.shape[0]
    #scst_ix = scst_ix[ix_mask]
    #time = time[ix_mask]
    #time2run = time_c[hv[scst_ix]>0]
    data_mask1 = hv[scst_ix] > 0
    ct1 = np.trim_zeros(centroids[:, 0], 'f')
    lz = centroids[:, 0].shape[0] - ct1.shape[0]
    tz = np.trim_zeros(centroids[:, 0], 'b').shape[0]
    data_mask2 = np.zeros(centroids.shape[0])
    data_mask2[lz:tz] = 1
    data_mask2 = data_mask2 > 0
    data_mask = data_mask1 & data_mask2
    '''
    rng = np.random.RandomState(42)
    clf = IsolationForest(max_samples=100, random_state=rng)
    X_train = np.column_stack([time[data_mask>0], centroids[data_mask>0,0:2]])
    clf.fit(X_train)
    y_pred_train = clf.predict(X_train)
    outlier = y_pred_train==-1
    '''
    outlier1 = (np.sum(centroids[data_mask], axis=1) == 0)
    lim = 10  #6#6#5#3.5
    w = 50  #40#40#30
    out_mask = np.zeros(np.sum(data_mask))
    z, mz = moving_stat(centroids[data_mask, 0], out_mask, half_win=w)
    outlier = mz > lim
    z, mz = moving_stat(centroids[data_mask, 1], out_mask, half_win=w)
    outlier = outlier | (mz > lim)
    a = np.zeros(np.sum(data_mask))
    a[0:100] = 1
    a = a.astype(bool)
    outlier = (outlier | outlier1) | a  #| (centroids[data_mask,1]>.07) #| (((centroids[data_mask,0]>.03) | (centroids[data_mask,0]<0)) & a)
    c_new = np.zeros(centroids.shape)
    '''
    spl_ra = splrep(time[data_mask][~outlier], centroids[data_mask,0][~outlier])
    spl_dec = splrep(time[data_mask][~outlier], centroids[data_mask,1][~outlier])
    c_new[data_mask,0] = splev(time[data_mask], spl_ra)
    c_new[data_mask,1] = splev(time[data_mask], spl_dec)
    '''
    #fra = interpolate.interp1d(time[data_mask][~outlier], centroids[data_mask,0][~outlier])
    #fdec = interpolate.interp1d(time[data_mask][~outlier], centroids[data_mask,1][~outlier])
    if time[data_mask][-1] > time[data_mask][~outlier][-1]:
        ex_mask = time > time[data_mask][~outlier][-1]
        c_new[data_mask & ex_mask, 0] = centroids[data_mask][~outlier][-1, 0]
        c_new[data_mask & ex_mask, 1] = centroids[data_mask][~outlier][-1, 1]
        outlier = outlier[~ex_mask[data_mask]]
        data_mask = data_mask & (~ex_mask)
    if time[data_mask][0] < time[data_mask][~outlier][0]:
        ex_mask = time < time[data_mask][~outlier][0]
        c_new[data_mask & ex_mask, 0] = centroids[data_mask][~outlier][0, 0]
        c_new[data_mask & ex_mask, 1] = centroids[data_mask][~outlier][0, 1]
        outlier = outlier[~ex_mask[data_mask]]
        data_mask = data_mask & (~ex_mask)
    spl_ra = splrep(time[data_mask][~outlier], centroids[data_mask, 0][~outlier])
    spl_dec = splrep(time[data_mask][~outlier], centroids[data_mask, 1][~outlier])
    c_new[data_mask, 0] = splev(time[data_mask], spl_ra)
    c_new[data_mask, 1] = splev(time[data_mask], spl_dec)
    '''
    else:
        #c_new[data_mask,0] = fra(time[data_mask])
        #c_new[data_mask,1] = fdec(time[data_mask])
        spl_ra = splrep(time[data_mask][~outlier], centroids[data_mask,0][~outlier])
        spl_dec = splrep(time[data_mask][~outlier], centroids[data_mask,1][~outlier])
        c_new[data_mask,0] = splev(time[data_mask], spl_ra)
        c_new[data_mask,1] = splev(time[data_mask], spl_dec)
    '''
    print(centroids.shape)
    print(time.shape)
    output = '../plots/0/%s-%s.pdf' % (name, suffix)
    dir = os.path.dirname(output)
    if not os.path.exists(dir):
        os.makedirs(dir)
    x = np.arange(centroids.shape[0])
    plt.plot(x[data_mask], centroids[data_mask, 0], '.k', markersize=2)
    #plt.plot(x[data_mask][outlier], centroids[data_mask,0][outlier], '.r', markersize=2)
    plt.plot(x[data_mask], c_new[data_mask, 0], '.r', markersize=2)
    #plt.plot(x[ex_mask], centroids[ex_mask,0], '.r', markersize=2)
    #plt.xlim(500,750)
    plt.tight_layout()
    plt.savefig('../plots/0/%s-%s_ra.pdf' % (name, suffix), dpi=190)
    plt.clf()
    plt.plot(x[data_mask], centroids[data_mask, 1], '.k', markersize=2)
    #plt.plot(x[data_mask][outlier], centroids[data_mask,1][outlier], '.r', markersize=2)
    plt.plot(x[data_mask], c_new[data_mask, 1], '.r', markersize=2)
    #plt.plot(x[ex_mask], centroids[ex_mask,1], '.r', markersize=2)
    #plt.xlim(500,750)
    plt.tight_layout()
    plt.savefig('../plots/0/%s-%s_dec.pdf' % (name, suffix), dpi=190)
    plt.clf()
    #np.save(tmp_dir+'/offsets_%s.npy'%(suffix), centroids)
    #np.save(tmp_dir+'/time_%s.npy'%(suffix), time)
    hdulist = pyfits.open(asprta)
    co_data = hdulist[1].data
    T = co_data['T']
    ra = co_data['ra']
    dec = co_data['dec']
    roll = co_data['roll']
    ra_new = np.interp(time, T, ra) - c_new[:, 0]
    dec_new = np.interp(time, T, dec) - c_new[:, 1]
    roll_new = np.interp(time, T, roll) - c_new[:, 2]
    other = np.zeros((time.shape[0], 8))
    array = np.concatenate([np.array([time, ra_new, dec_new, roll_new]).T,
                            other], axis=1)
    data = np.core.records.fromarrays(
        array.transpose(),
        dtype=[('T', float), ('RA', float), ('DEC', float), ('ROLL', float),
               ('STATUS_FLAG', int), ('ROLL_RAD', float), ('X', float),
               ('Y', float), ('Z', float), ('XDOT', float), ('YDOT', float),
               ('ZDOT', float)])
    new_file = re.split('-asprta.fits', asprta)[0] + '-' + suffix + '-asprta.fits'
    os.system('cp {0} {1}'.format(asprta, new_file))
    update(new_file, data, 1)
    hdu = pyfits.open(new_file)
    print(hdu[1].data['RA'].shape)
    print(hdu[1].data['DEC'].shape)
    hdu.close()
def find_scale(imagedata,
               affine_best,  # best current guess at data geometry cf analytical ideal
               scales,       # scales are near-unity
               pixel, npix, bandpass, over, holeshape,
               outdir=None):  # for nrm_model
    """
    Preserve the incoming "pixel" value; put the scale correction into the
    Affine2d object.  Is that kosher???  Should we change the pixel scale and
    leave affine2d the same?  Affine2d can also incorporate unequal x and y
    scales, shears...  For now place scale corrections into the Affine2d
    object.

    Note - placing an isotropic scale change into Affine2d is equivalent to
    changing the effective image distance in the optical train while
    insisting that the mask physical geometry does not change, and the
    wavelength is perfectly known.

    AS 2018 10
    """
    affine_best.show("\tfind_scale")
    vprint("\tBefore Loop: ", scales)
    # Extend this name to include multi-parameter searches?
    psffmt = 'psf_nrm_{0:d}_{1:s}_{2:.3f}um_scl{3:.3f}.fits'
    # expect (npix, holeshape, bandpass/um, scl)
    if hasattr(scales, '__iter__') is False:
        scales = (scales,)
    affine2d_list = create_afflist_scales(scales,
                                          affine_best.mx, affine_best.my,
                                          affine_best.sx, affine_best.sy,
                                          affine_best.xo, affine_best.yo)
    crosscorrs = []
    for (scl, aff) in zip(scales, affine2d_list):
        vprint(aff.name + "...")
        jw = NRM_Model(mask='jwst', holeshape=holeshape, over=over,
                       affine2d=aff)
        jw.set_pixelscale(pixel)
        jw.simulate(fov=npix, bandpass=bandpass, over=over)
        psffn = psffmt.format(npix, holeshape, bandpass[:, 1][0] / um, scl)
        if outdir:
            fits.PrimaryHDU(data=jw.psf).writeto(outdir + "/" + psffn,
                                                 overwrite=True)
            fits.writeto(psffn, jw.psf, overwrite=True)
            header = fits.getheader(psffn)
            utils.affinepars2header(header, aff)
            fits.update(psffn, jw.psf, header=header)
        crosscorrs.append(utils.rcrosscorrelate(imagedata, jw.psf).max())
        del jw
    vprint("\tfind_affine2d_parameters: crosscorrelations", crosscorrs)
    vprint("\tfind_affine2d_parameters: scales", scales)
    scl_measured, max_cor = utils.findpeak_1d(crosscorrs, scales)
    vprint("\tfind_affine2d_parameters factor measured {0:.5f}  Max correlation {1:.3e}"
           .format(scl_measured, max_cor))
    vprint("\tfind_affine2d_parameters pitch from header {0:.3f} mas".format(
        pixel * rad2mas))
    vprint("\tfind_affine2d_parameters pitch {0:.3f} mas (implemented using affine2d)"
           .format(scl_measured * pixel * rad2mas))
    # return convenient affine2d
    return utils.Affine2d(affine_best.mx * scl_measured,
                          affine_best.my * scl_measured,
                          affine_best.sx * scl_measured,
                          affine_best.sy * scl_measured,
                          affine_best.xo * scl_measured,
                          affine_best.yo * scl_measured,
                          name="scale_{0:.4f}".format(scl))
def update_fits(self):
    # stack wavelength and spectrum only when a wavelength array is present
    data = np.vstack((self.wav, self.spec)) if self.wav is not None else self.spec
    fits.update(self.file, data, self.header)
def setUp(self):
    allholes = ('b4', 'c2', 'b5', 'b2', 'c1', 'b6', 'c6')
    b4, c2, b5, b2, c1, b6, c6 = allholes
    self.hc = (b2, b6, b5)  # holechoices
    self.hstr = holes2string(self.hc)
    self.holeshape = "hex"
    # directory containing the test data
    data_dir = os.path.join(os.path.dirname(__file__),
                            'test_data/affine2d_makemodel_rot')
    self.data_dir = data_dir
    print(data_dir)
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)
    # expects strings of % (self.npix, self.holeshape, self.wave/um, rot, self.hstr)
    self.fnfmt = data_dir + '/psf_nrm_{2:.1f}_{0}_{1}_{3:.0f}_{4:s}.fits'
    self.fmt = " ({0:+.3f}, {1:+.3f}) -> ({2:+.3f}, {3:+.3f})"
    self.pixel = 0.0656
    self.npix = 87
    self.wave = 4.3e-6  # m
    self.over = 11
    mx, my = 1.0, 1.0
    sx, sy = 0.0, 0.0
    xo, yo = 0.0, 0.0
    affine_ideal = Affine2d(mx=mx, my=my, sx=sx, sy=sy, xo=xo, yo=yo,
                            name="Ideal")
    rots = (0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50,
            55, 60, 65, 70, 75, 80, 85, 90, 95)
    rots = (0.000, 10.000,)
    for rot in rots:
        rot = avoidhexsingularity(rot)  # in utils
        affine_rot = Affine2d(rotradccw=np.pi * rot / 180.0,
                              name="{0:.0f}".format(rot))  # in utils
        aff = affine_rot
        # holeshape is hex or circ g7s6 jwst
        self.jw = NRM_Model(mask='jwst', holeshape=self.holeshape,
                            #chooseholes=self.hc,
                            affine2d=aff)
        self.jw.set_pixelscale(self.pixel * arcsec2rad)
        self.jw.simulate(fov=self.npix, bandpass=self.wave, over=self.over)
        # write psf
        psffn = self.fnfmt.format(self.npix, self.holeshape, self.wave / um,
                                  rot, self.hstr)
        fits.writeto(psffn, self.jw.psf, overwrite=True)
        header = fits.getheader(psffn)
        header = affinepars2header(header, aff)
        fits.update(psffn, self.jw.psf, header=header)
        print("test: psf shape", self.jw.psf.shape)
        # make_model requires a primary beam for one slice of the model
        modelslices = self.jw.make_model(fov=self.npix, bandpass=self.wave,
                                         over=self.over)
        print("test: modelslices type", type(modelslices))
        print("test: modelslices shape", modelslices.shape)
        modelfn = psffn.replace("psf_nrm", "model")
        # write model
        model_for_fitsfile = np.zeros((modelslices.shape[2],
                                       modelslices.shape[0],
                                       modelslices.shape[1]))
        for sl in range(modelslices.shape[2]):
            model_for_fitsfile[sl, :, :] = modelslices[:, :, sl]
        print("test: model_for_fitsfile type", type(model_for_fitsfile))
        print("test: model_for_fitsfile shape", model_for_fitsfile.shape)
        fits.writeto(modelfn, model_for_fitsfile[6, :, :], overwrite=True)
        header = fits.getheader(modelfn)
        header = affinepars2header(header, aff)
        fits.update(modelfn, model_for_fitsfile[6, :, :], header=header)
    del self.jw
    del aff
    del affine_rot
logf.write(str(datetime.now() - startTime) + " - Done writing ASCII\n")

# Write FITS files with simulated data (in units of electrons)
hdu0 = fits.PrimaryHDU()
hdu1 = fits.ImageHDU()
hdu2 = fits.ImageHDU()
hdu3 = fits.ImageHDU()
hdu4 = fits.ImageHDU()
hdul = fits.HDUList([hdu0, hdu1, hdu2, hdu3, hdu4])
hdul.writeto(tmpdir + '/lcoetc_out.fits', clobber=1)
fits.update(tmpdir + '/lcoetc_out.fits',
            np.transpose(neobj2d + nesky2d + mkrandnoise(nenoise2d)), 1)
fits.update(tmpdir + '/lcoetc_out.fits', np.transpose(neobj2d), 2)
fits.update(tmpdir + '/lcoetc_out.fits', np.transpose(nesky2d), 3)
fits.update(tmpdir + '/lcoetc_out.fits', np.transpose(nenoise2d), 4)
hdul = fits.open(tmpdir + '/lcoetc_out.fits')
for i in np.arange(4):
    hdul[i + 1].header.set('DISPAXIS', '1')
    hdul[i + 1].header.set('CTYPE1', 'LINEAR')
    hdul[i + 1].header.set('CUNIT1', 'Angstroms')
    hdul[i + 1].header.set('CRPIX1', '1')
    hdul[i + 1].header.set('CRVAL1', str(lam[0]))
    hdul[i + 1].header.set('CD1_1', str(ddisp))
def main(folder, quiet=0):
    """analysis.main(folder, quiet=0)

    This script gives the number of "observed" stars from the sampled
    datafiles in "folder"/ according to the selection criteria from
    Yusef-Zadeh et al.

    Parameters
    ----------
    folder  String:
        Specifies the folder where the files are
    quiet   boolean:
        =1 suppresses all standard output

    Returns
    -------
    Writes a file named __expected_number which contains a numpy array of
    the simulation parameters number, A_v, Aperature_size, Age and the
    expected 'detected' number.  The first line of the file is a
    ','-separated head of the contained information.
    """
    if quiet:
        output_stream = StringIO()
    else:
        output_stream = sys.stdout
    color1 = "I4"  # filter system for first color of CMD
    color2 = "M1"  # filter system for second color of CMD
    zeromagc1 = zero.zero_mag[color1]
    zeromagc2 = zero.zero_mag[color2]
    min_mag = 8.  # minimal observation limit
    max_mag = 0.  # maximal observation limit

    # getting file list
    files = sorted(os.listdir('%s/%s' % (os.getcwdu(), folder)))
    out = []
    for fil in files:
        # only using files created by the automated simulation
        if fil.startswith('sim_') and not 'settings' in fil.encode("ascii"):
            print("%s/%s" % (folder, fil.encode("ascii")), file=output_stream)
            # Read in
            hdulist = fits.open('%s/%s' % (folder, fil))
            data = hdulist[1].data
            # calculating magnitudes from fluxes and converting to CMD-data
            x = -2.5 * (np.log10(data['c%s' % color1] / zeromagc1) -
                        np.log10(data['c%s' % color2] / zeromagc2))
            y = -2.5 * (np.log10(data['c%s' % color2] / zeromagc2))
            sel = np.logical_and((y > -10. / 3. * (x - 1.) + 10.),
                                 np.logical_and(max_mag < y, y < min_mag))
            sel = np.logical_and(sel, y < -x + 12.)
            n = sum(sel)
            t = Table(hdulist[1].data)
            if 'sel' in t.columns:
                t.remove_column('sel')
            t.add_column(Column(name='sel', data=sel.astype('int')))
            hdulist[1].data = np.array(t)
            tmp, av, apera, age = fil.split('_')
            fits.update('%s/%s' % (folder, fil), np.array(t), ext=1,
                        clobber=True)
            out.append([av, apera, age, n])

    # writing obtained data to "folder/__expected_number"
    head = ['#', 'AV', 'Aperature_size', 'Age', 'Expected_number']
    f = open('%s/__expected_number' % folder, 'w')
    f.write(','.join(head) + '\n')
    np.savetxt(f, np.asarray(out).astype(int))
    f.close()
    print("Analysed %s files and saved output to %s" %
          (len(out), '%s/__expected_number' % folder), file=output_stream)
def generate_new_offsets_new(name, asprta, suffix, tmp_dir, num_p):
    print(name)
    #centers = []
    #center_time = []
    '''
    hdulist = pyfits.open('../AIS_GAL_SCAN/asprta/%s-asprta.fits'%name)
    initial = 0
    final = hdulist[1].data['T'].shape[0]-1
    hdulist.close()
    for i in range(initial, final+1):
        #center = np.load('../data/%s/cata/centroids_rot%d.npy'%(name, i))
        #if center.shape!=(2,3):
        #    print i, center.shape
        centers.append(np.load(tmp_dir+'/centroids_rot%d.npy'%(i)))
        center_time.append(np.load(tmp_dir+'/time_rot%d.npy'%(i)))
    '''
    '''
    for i in range(num_p):
        centers.append(np.load(tmp_dir+'/centroids_tmp%d.npy'%(i)))
        center_time.append(np.load(tmp_dir+'/time_tmp%d.npy'%(i)))
    centroids = np.concatenate(centers, axis=0)
    time = np.concatenate(center_time, axis=0)
    '''
    try:
        centroids = np.load(tmp_dir + '/offsets_%s.npy' % (suffix))
        time = np.load(tmp_dir + '/time_%s.npy' % (suffix))
    except IOError:
        try:
            centroids = np.load(tmp_dir + '/offsets0_%s.npy' % (suffix))
            time = np.load(tmp_dir + '/time0_%s.npy' % (suffix))
        except IOError:
            print('no file')
            return 0
    ct1 = np.trim_zeros(centroids[:, 0], 'f')
    lz = centroids[:, 0].shape[0] - ct1.shape[0]
    tz = np.trim_zeros(centroids[:, 0], 'b').shape[0]
    data_mask = np.zeros(centroids.shape[0])
    data_mask[lz:tz] = 1
    lim = 10
    out_mask = np.zeros(tz - lz)
    z, mz = moving_stat(centroids[data_mask > 0, 0], out_mask, half_win=100)
    outlier = mz > lim
    z, mz = moving_stat(centroids[data_mask > 0, 1], out_mask, half_win=100)
    outlier = outlier | (mz > lim)
    c_new = np.zeros(centroids.shape)
    spl_ra = splrep(time[data_mask > 0][~outlier],
                    centroids[data_mask > 0, 0][~outlier])
    spl_dec = splrep(time[data_mask > 0][~outlier],
                     centroids[data_mask > 0, 1][~outlier])
    c_new[data_mask > 0, 0] = splev(time[data_mask > 0], spl_ra)
    c_new[data_mask > 0, 1] = splev(time[data_mask > 0], spl_dec)
    print(centroids.shape)
    print(time.shape)
    output = '../plots/0/%s-%s.pdf' % (name, suffix)
    dir = os.path.dirname(output)
    if not os.path.exists(dir):
        os.makedirs(dir)
    plt.plot(centroids[:, 0], '.k')
    plt.plot(c_new[:, 0], '.r')
    plt.savefig('../plots/0/%s-%s_ra.pdf' % (name, suffix), dpi=190)
    plt.clf()
    plt.plot(centroids[:, 1], '.k')
    plt.plot(c_new[:, 1], '.r')
    plt.savefig('../plots/0/%s-%s_dec.pdf' % (name, suffix), dpi=190)
    plt.clf()
    #np.save(tmp_dir+'/offsets_%s.npy'%(suffix), centroids)
    #np.save(tmp_dir+'/time_%s.npy'%(suffix), time)
    hdulist = pyfits.open(asprta)
    co_data = hdulist[1].data
    T = co_data['T']
    ra = co_data['ra']
    dec = co_data['dec']
    roll = co_data['roll']
    ra_new = np.interp(time, T, ra) - c_new[:, 0]
    dec_new = np.interp(time, T, dec) - c_new[:, 1]
    roll_new = np.interp(time, T, roll) - c_new[:, 2]
    other = np.zeros((time.shape[0], 8))
    array = np.concatenate([np.array([time, ra_new, dec_new, roll_new]).T,
                            other], axis=1)
    data = np.core.records.fromarrays(
        array.transpose(),
        dtype=[('T', float), ('RA', float), ('DEC', float), ('ROLL', float),
               ('STATUS_FLAG', int), ('ROLL_RAD', float), ('X', float),
               ('Y', float), ('Z', float), ('XDOT', float), ('YDOT', float),
               ('ZDOT', float)])
    new_file = re.split('-asprta.fits', asprta)[0] + '-' + suffix + '-asprta.fits'
    os.system('cp {0} {1}'.format(asprta, new_file))
    update(new_file, data, 1)
    hdu = pyfits.open(new_file)
    print(hdu[1].data['RA'].shape)
    print(hdu[1].data['DEC'].shape)
    hdu.close()
extname = f'{151}-{153}_linelog'
cols = fits.ColDefs(list_columns)
hdu = fits.BinTableHDU.from_columns(cols, name=extname)

# Save extra data in header
for param, value in fit_results['Fitting_results'].items():
    hdu.header[f'hierarch {param}'] = value[0]
    hdu.header[f'hierarch {param}_err'] = value[1]
for i, label in enumerate(inputLabels):
    hdu.header[f'hierarch flux_{label}'] = inputFlux[i]
    hdu.header[f'hierarch err_{label}'] = inputErr[i]

if db_fits.is_file():
    try:
        print('Updating')
        fits.update(db_fits, data=hdu.data, header=hdu.header,
                    extname=extname, verify=True)
    except:
        print('Appending')
        fits.append(db_fits, data=hdu.data, header=hdu.header,
                    extname=extname)
else:
def clean(usr_imgfile, usr_outfile):
    #numerixenv.check()  # Temporary NUMERIX environment check
    print("puftcorr version %s" % __version__)
    print("Input file: %s" % usr_imgfile)
    print("Output file: %s" % usr_outfile)
    imgfile = osfn(usr_imgfile)
    outfile = osfn(usr_outfile)
    # check for existence of output file
    if os.access(outfile, os.F_OK):
        s = "\nERROR: Output file %s already exists\n" % (outfile)
        sys.stdout.write(s)
        return
    # create the output file as a copy of the input raw file
    shutil.copyfile(imgfile, outfile)
    # retrieve the input file
    img = InputFile(imgfile)
    # set correction parameter values
    pars = params(img.camera)
    # loop over readouts in the input file
    for i in range(img.nsamp):
        imset = i + 1
        if imset == 1:
            s = " processing imset 1"
        elif imset == img.nsamp:
            s = " %d\n" % imset
        else:
            s = " %d" % imset
        sys.stdout.write(s)
        sys.stdout.flush()
        # get individual readout
        im = Readout(img, imset)
        # rotate, if necessary
        if img.camera == 1:
            im.data = ndimage.rotate(im.data, -90, mode='nearest')
        elif img.camera == 3:
            im.data = ndimage.rotate(im.data, 180, mode='nearest')
        # get correction image
        im = get_corr(im, pars)
        # rotate back, if necessary
        if img.camera == 1:
            im.data = ndimage.rotate(im.data, +90, mode='nearest')
        elif img.camera == 3:
            im.data = ndimage.rotate(im.data, 180, mode='nearest')
        # subtract correction image from original raw image
        im.data = img.f['sci', imset].data - im.data
        # make sure corrected pixel values don't go off the
        # ends of the Int16 data range before writing to output
        im.data = np.clip(im.data, -32768.0, 32767.0)
        # write corrected image to output file
        pyfits.update(outfile, im.data.astype(np.dtype('int16')),
                      'sci', imset, header=im.header)
    # close the input files
    img.f.close()
    img.dark.close()
    return
def generate_first_offsets(name):
    print(name)
    hdulist = pyfits.open('../AIS_GAL_SCAN/asprta/%s-asprta.fits' % name)
    initial = 1
    final = hdulist[1].data['T'].shape[0] - 1
    centers = []
    center_time = []
    for i in range(initial, final + 1):
        c = np.load('../data/%s/cata/centroids_rot%d.npy' % (name, i))
        #if c.shape == (1,3):
        #    c = c[:,:2]
        centers.append(c)
        center_time.append(
            np.load('../data/%s/cata/time_rot%d.npy' % (name, i)))
        print(c.shape)
    centroids = np.concatenate(centers, axis=0)
    time = np.concatenate(center_time, axis=0)
    print(centroids.shape)
    out_mask = np.zeros(centroids.shape[0])
    z, mz = moving_stat(centroids[:, 0], out_mask, half_win=100)
    outliers = np.zeros(centroids.shape[0])
    outliers[mz > 3.5] = 1
    outliers[out_mask > 0] = 1
    outliers = outliers > 0
    index = np.arange(centroids.shape[0])
    centroids[outliers, 0] = np.interp(index[outliers], index[~outliers],
                                       centroids[~outliers, 0])
    z, mz = moving_stat(centroids[:, 1], out_mask, half_win=100)
    outliers = np.zeros(centroids.shape[0])
    outliers[mz > 3.5] = 1
    outliers[out_mask > 0] = 1
    outliers = outliers > 0
    index = np.arange(centroids.shape[0])
    centroids[outliers, 1] = np.interp(index[outliers], index[~outliers],
                                       centroids[~outliers, 1])
    output = "../plots/%s/cata/output.csv" % (name)
    dir = os.path.dirname(output)
    if not os.path.exists(dir):
        os.makedirs(dir)
    plt.plot(centroids[:, 0], '.b')
    plt.savefig('../plots/%s/cata/offsets_10_new_half.pdf' % name, dpi=190)
    plt.clf()
    np.save('../data/%s/cata/time%d_10_new_half.npy' % (name, initial), time)
    np.save('../data/%s/cata/offsets%d_10_new_half.npy' % (name, initial),
            centroids)
    co_data = hdulist[1].data
    T = co_data['T']
    ra = co_data['ra']
    dec = co_data['dec']
    roll = co_data['roll']
    ra_new = np.interp(time, T, ra) - centroids[:, 0]
    dec_new = np.interp(time, T, dec) - centroids[:, 1]
    roll_new = np.interp(time, T, roll) - centroids[:, 2]
    other = np.zeros((time.shape[0], 8))
    array = np.concatenate([np.array([time, ra_new, dec_new, roll_new]).T,
                            other], axis=1)
    data = np.core.records.fromarrays(
        array.transpose(),
        dtype=[('T', float), ('RA', float), ('DEC', float), ('ROLL', float),
               ('STATUS_FLAG', int), ('ROLL_RAD', float), ('X', float),
               ('Y', float), ('Z', float), ('XDOT', float), ('YDOT', float),
               ('ZDOT', float)])
    new_file = '../AIS_GAL_SCAN/asprta/%s-cal-asprta.fits' % (name)
    os.system('cp ../AIS_GAL_SCAN/asprta/%s-asprta.fits '
              '../AIS_GAL_SCAN/asprta/%s-cal-asprta.fits' % (name, name))
    update(new_file, data, 1)
    hdu = pyfits.open(new_file)
    print(hdu[1].data['RA'].shape)
    print(hdu[1].data['DEC'].shape)
    hdu.close()
    #asp_cal.interpolate_offsets(name, 1., centroids)
    tmp_files = glob.glob("../data/%s/cata/centroids_rot*" % name)
    for tmp_file in tmp_files:
        os.remove(tmp_file)
    tmp_files = glob.glob("../data/%s/cata/time_rot*" % name)
    for tmp_file in tmp_files:
        os.remove(tmp_file)