def bright_pix(self, fitsfile, hdu):
    """
    Does the work of finding the bright pixels and columns.
    """
    im = image_utils.unbias_and_trim(afwImage.ImageF(fitsfile, hdu))
    imarr = im.getArray()
    mean = np.mean(imarr)
    sigma = np.std(imarr)
    threshold = self.nsig * sigma + mean
    # Find bright pixels.
    pixels = np.where(imarr > threshold)
    # Find bright columns.  The column means must be a numpy array so that
    # the comparison with the scalar threshold broadcasts element-wise.
    col_means = np.array([np.mean(imarr[:, i]) for i in range(im.getWidth())])
    columns = np.where(col_means > threshold)
    # Weed out bright pixels that are already in bright columns.
    indx = [i for i in range(len(pixels[1]))
            if pixels[1][i] not in columns[0]]
    pixels = (pixels[0][indx], pixels[1][indx])
    # Return the bright pixels as (x, y) pairs sorted by column, then row.
    tup = list(zip(pixels[1], pixels[0]))
    sorted_tup = sorted(tup)
    return len(sorted_tup), sorted_tup, columns
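# A hypothetical usage sketch for bright_pix.  The enclosing class is not
# shown in this snippet, so the class name, its nsig value, the file name,
# and the HDU below are all illustrative assumptions.
#
#     finder = BrightPixels(nsig=5)      # hypothetical class providing nsig
#     nbright, pixel_list, columns = finder.bright_pix('dark_median.fits', hdu=2)
#     print("%i isolated bright pixels, %i bright columns"
#           % (nbright, len(columns[0])))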
def dark_pct(files, percentile=90., hdu=2, gain=1):
    if percentile < 0 or percentile > 100:
        raise RuntimeError("percentile must be between 0 and 100")
    # Read the exposure time from the primary HDU of the first file.
    exptime = afwImage.readMetadata(files[0], 1).get('EXPTIME')
    im = unbias_and_trim(fits_median(files, hdu=hdu))
    im *= gain
    im /= exptime
    npix = im.getHeight() * im.getWidth()
    imarr = np.sort(im.getArray().reshape(npix))
    # Clamp the index so that percentile=100 does not run past the last pixel.
    index = min(npix - 1, int(npix * float(percentile) / 100.))
    return imarr[index]
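# A hedged usage sketch for dark_pct; the file pattern and gain value are
# illustrative only, and glob is assumed to be imported.
#
#     dark_files = glob.glob('dark_*.fits')
#     dark90 = dark_pct(dark_files, percentile=90., hdu=2, gain=4.5)
#     print("90th-percentile dark signal: %.4f e-/pixel/s" % dark90)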
def dark_curr(files, hdu=2, gain=1, count=1000, dx=100, dy=100, seed=None):
    # Note: random here is numpy.random (randint's size keyword is numpy-only).
    random.seed(seed)
    exptime = afwImage.readMetadata(files[0], 1).get('EXPTIME')
    im = unbias_and_trim(fits_median(files, hdu=hdu))
    # Generate dx by dy boxes at random locations, estimate the signal in
    # each, then take the median.  This avoids bright defects.
    xarr = random.randint(im.getWidth() - dx - 1, size=count)
    yarr = random.randint(im.getHeight() - dy - 1, size=count)
    signal = []
    for x, y in zip(xarr, yarr):
        bbox = afwGeom.Box2I(afwGeom.Point2I(int(x), int(y)),
                             afwGeom.Extent2I(dx, dy))
        subim = im.Factory(im, bbox)
        signal.append(np.mean(subim.getArray()))
    dark_current = np.median(signal) * gain / exptime
    return dark_current
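# Hypothetical call of dark_curr on the same dark frames used above; the gain
# and seed values are illustrative.  Fixing the seed makes the randomly placed
# boxes reproducible between runs.
#
#     dc = dark_curr(dark_files, hdu=2, gain=4.5, seed=1001)
#     print("median dark current: %.4f e-/pixel/s" % dc)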
o = open(outfilepath, "w+")
o.write('\t'.join(['filename', 'exptime', 'amp', 'wavelength',
                   'median', 'photodiode', '\n']))
for i, fname in enumerate(files):
    # Get the wavelength, photodiode reading, and exposure time from the
    # primary HDU.
    md = afwImage.readMetadata(fname, 1)
    wl = md.get("MONO_WAVELENG")
    photodiode = md.get("K_PHOT_CURRENT")
    exptime = md.get("EXPTIME")
    for amp in amps:
        # Open the image, trim and debias it, and compute the median signal.
        im = afwImage.ImageF(fname, amp + 1)
        im2 = iu.unbias_and_trim(im)
        ampmedian = afwMath.makeStatistics(im2, afwMath.MEDIAN).getValue()
        o.write('\t'.join([fname, str(exptime), str(amp), str(wl),
                           str(ampmedian), str(photodiode), '\n']))
    print("%i of %i done" % (i + 1, len(files)))
o.close()
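# A minimal sketch of reading the table back in; outfilepath is the same file
# written above.  Each row ends with a trailing tab before the newline, so
# empty fields are dropped here.
#
#     with open(outfilepath) as results:
#         results.readline()                  # skip the header row
#         for line in results:
#             fields = [x for x in line.split('\t') if x.strip()]
#             fname, exptime, amp, wl, median, photodiode = fields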
# Write the column names to the output file.
f = open(outfile, "w+")
f.write('\t'.join(["agg_amp",
                   "vic_01", "vic_02", "vic_03", "vic_04",
                   "vic_05", "vic_06", "vic_07", "vic_08",
                   "vic_09", "vic_10", "vic_11", "vic_12",
                   "vic_13", "vic_14", "vic_15", "vic_16",
                   "\n"]))
# Find the appropriate image.
files = glob.glob("/Users/amali/Desktop/900nm*.fits")
fname = files[0]
# Bias-correct the aggressor image.
im_a = afwImage.ImageF(fname, agg + 1)
im_a2 = iu.unbias_and_trim(im_a)
# The DM stack approach that could not be made to work:
#im_a = afwImage.ExposureF(fname, agg+1)
#im_a_i = im_a.getMaskedImage().getImage()
#im_a2 = iu.unbias_and_trim(im_a_i)
#ds9.mtv(im_a2)
#threshold = afwDetect.Threshold(30000)
#fs = afwDetect.FootprintSet(im_a.getMaskedImage(), threshold)
#fs.setMask(im_a.getMaskedImage().getMask(), "DETECTED")
#ds9.mtv(im_a)
# Using numpy instead:
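# A hedged sketch of the numpy-based detection hinted at above.  The 30000 ADU
# threshold is taken from the commented-out DM stack attempt; everything else
# is an illustrative assumption, not the script's actual continuation.
#
#     arr_a = im_a2.getArray()
#     spot = np.where(arr_a > 30000)          # pixels in the aggressor spot
#     agg_signal = np.mean(arr_a[spot])       # mean aggressor signal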