def writeWeights(self): """ Writes an h5 file to put calculated flat cal factors in """ if os.path.isabs(self.flatCalFileName) == True: fullFlatCalFileName = self.flatCalFileName print(self.flatCalFileName) else: scratchDir = os.getenv('MKID_PROC_PATH') flatDir = os.path.join(scratchDir, 'flatCalSolnFiles') fullFlatCalFileName = os.path.join(flatDir, self.flatCalFileName) if not os.path.exists(fullFlatCalFileName) and self.calSolnPath == '': os.makedirs(fullFlatCalFileName) try: flatCalFile = tables.open_file(fullFlatCalFileName, mode='w') except: print('Error: Couldn\'t create flat cal file, ', fullFlatCalFileName) return print('wrote to', self.flatCalFileName) calgroup = flatCalFile.create_group( flatCalFile.root, 'flatcal', 'Table of flat calibration weights by pixel and wavelength') calarray = tables.Array( calgroup, 'weights', obj=self.flatWeights.data, title= 'Flat calibration Weights indexed by pixelRow,pixelCol,wavelengthBin' ) flagtable = tables.Array( calgroup, 'flags', obj=self.flatFlags, title= 'Flat cal flags indexed by pixelRow,pixelCol,wavelengthBin. 0 is Good' ) bintable = tables.Array( calgroup, 'wavelengthBins', obj=self.wvlBinEdges, title= 'Wavelength bin edges corresponding to third dimension of weights array' ) descriptionDict = FlatCalSoln_Description(self.nWvlBins) caltable = flatCalFile.create_table(calgroup, 'calsoln', descriptionDict, title='Flat Cal Table') for iRow in range(self.nXPix): for iCol in range(self.nYPix): weights = self.flatWeights[iRow, iCol, :] deltaWeights = self.deltaFlatWeights[iRow, iCol, :] flags = self.flatFlags[iRow, iCol, :] flag = np.any(self.flatFlags[iRow, iCol, :]) pixelName = self.beamImage[iRow, iCol] entry = caltable.row entry['resid'] = pixelName entry['pixelrow'] = iRow entry['pixelcol'] = iCol entry['weights'] = weights entry['weightUncertainties'] = deltaWeights entry['weightFlags'] = flags entry['flag'] = flag entry.append() flatCalFile.flush() flatCalFile.close() npzFileName = os.path.splitext(fullFlatCalFileName)[0] + '.npz'
def addFontAtlas(self, font_atlas):
    style_group = None
    family_group = self.getFamilyGroup(font_atlas.font_info.family_name)
    if family_group:
        style_group = self.getStyleGroup(font_atlas, family_group)
    if style_group:
        size = font_atlas.size
        dpi = font_atlas.dpi
        # Save the original font file to the hdf5 file.
        ttf_file_name = os.path.split(font_atlas.font_info.path)[-1]
        ttf_node_name = ttf_file_name.replace(u'.', u'_')
        try:
            ttf_exists = style_group._f_get_child(ttf_node_name)
        except tb.NoSuchNodeError:
            from tables.nodes import filenode
            with open(font_atlas.font_info.path, 'rb') as f:
                ttf_node = filenode.new_node(self._tables, where=style_group,
                                             name=ttf_node_name, title=ttf_file_name)
                ttf_node.write(f.read())
                ttf_node.close()

        # Create a group for this font size, dpi combo.
        font_size_group = None
        for a in self._tables.list_nodes(style_group.sizes, classname='Group'):
            if a._v_attrs.TITLE == "%d PT, %d DPI Data" % (size, dpi):
                font_size_group = a
                break
        if font_size_group is None:
            font_size_group = self._tables.create_group(
                style_group.sizes, "D_%d_%d" % (size, dpi),
                "%d PT, %d DPI Data" % (size, dpi))

            # Save some atlas info for later use.
            font_size_group._v_attrs['max_ascender'] = font_atlas.max_ascender
            font_size_group._v_attrs['max_descender'] = font_atlas.max_descender
            font_size_group._v_attrs['max_tile_width'] = font_atlas.max_tile_width
            font_size_group._v_attrs['max_tile_height'] = font_atlas.max_tile_height
            font_size_group._v_attrs['max_bitmap_size'] = font_atlas.max_bitmap_size
            font_size_group._v_attrs['total_bitmap_area'] = font_atlas.total_bitmap_area

            # Create a 2D array to store the font atlas bitmap data in.
            atlas_bmp = tb.Array(font_size_group, "FontGlyphAtlas",
                                 obj=font_atlas.atlas.data,
                                 title='Array Holding the Font Face Glyph Bitmaps')

            # Save the info for each glyph so the atlas data array and glyph
            # location can be used to generate display lists when the font
            # store is retrieved.
            chr_glyph_table = self._tables.create_table(
                font_size_group, 'UnicharGlyphData', FontGlyphData,
                "Data regarding one char/glyph within the font set.",
                expectedrows=400)
            tdata = []
            for charcode, gfinfo in font_atlas.charcode2glyph.items():
                x, y, w, h = gfinfo['atlas_coords']
                x1, y1, x2, y2 = gfinfo['texcoords']
                tdata.append((gfinfo['index'], charcode,
                              gfinfo['unichar'].encode('utf-8'),
                              gfinfo['offset'][0], gfinfo['offset'][1],
                              gfinfo['size'][0], gfinfo['size'][1],
                              x, y, w, h, x1, y1, x2, y2))
            chr_glyph_table.append(tdata)
            chr_glyph_table.flush()
        else:
            print('Font Size Group already exists!!', '%d pt, %d dpi' % (size, dpi))
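
# A minimal retrieval sketch for one of the size/dpi groups created by addFontAtlas,
# assuming the node names used above ('FontGlyphAtlas', 'UnicharGlyphData') and the
# attributes saved there; the helper name is illustrative, not part of the original store.
def read_font_size_group(font_size_group):
    """Return the atlas bitmap, per-glyph rows, and max tile size for one size/dpi group."""
    atlas_bitmap = font_size_group.FontGlyphAtlas.read()    # 2D glyph bitmap array
    glyph_rows = font_size_group.UnicharGlyphData.read()    # structured array of glyph metadata
    max_tile = (font_size_group._v_attrs['max_tile_width'],
                font_size_group._v_attrs['max_tile_height'])
    return atlas_bitmap, glyph_rows, max_tile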
print "Wrote to FITS: ", fitsFileName except: print "FITS file already present, did not overwrite" ################################################# # Setup h5 file to save imageStack intermediate/cal files, parameter, and output stackPath = os.path.join(calPath,"imageStacks") h5Path = os.path.join(stackPath,date) h5baseName = configFileName.split('.')[0] h5FileName = h5Path+'/%s.h5'%h5baseName h5file = tables.open_file(h5FileName,mode='w') stackgroup = h5file.create_group(h5file.root, 'imageStack', 'Table of images, centroids, and parameters used to create a final stacked image') ################################################# timeArray = tables.Array(stackgroup,'timestamps',timeStamps,title='Timestamps') ditherArray = tables.Array(stackgroup,'dithers',ditherNums,title='Dither positions') rawArray = tables.Array(stackgroup,'rawImgs',rawImgs,title='Raw Images') hotArray = tables.Array(stackgroup,'hpms',hotPixMasks,title='Hot Pixel Masks') coldArray = tables.Array(stackgroup,'cpms',coldPixMasks,title='Cold Pixel Masks') deadArray = tables.Array(stackgroup,'dpms',deadPixMasks,title='Dead Pixel Masks') aperArray = tables.Array(stackgroup,'ams',aperMasks,title='Aperture Masks') roughXArray = tables.Array(stackgroup,'roughX',roughShiftsX,title='Rough X Shifts') roughYArray = tables.Array(stackgroup,'roughY',roughShiftsY,title='Rough Y Shifts') fineXArray = tables.Array(stackgroup,'fineX',fineShiftsX,title='Fine X Shifts') fineYArray = tables.Array(stackgroup,'fineY',fineShiftsY,title='Fine Y Shifts') centXArray = tables.Array(stackgroup,'centX',centroidsX,title='Centroid X Locations') centYArray = tables.Array(stackgroup,'centY',centroidsY,title='Centroid Y Locations') darkArray = tables.Array(stackgroup,'dark',dark,title='Dark Frame') flatArray = tables.Array(stackgroup,'flat',flat,title='Flat Frame') finalArray = tables.Array(stackgroup,'finalImg',finalImage,title='Final Image')
def save_available_ids(h5f, available_ids):
    # Replace any existing 'available_ids' array, then write the new one.
    if hasattr(h5f.root, 'available_ids'):
        h5f.root.available_ids._f_remove()
    tb.Array(h5f.root, 'available_ids', obj=available_ids)
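
# A minimal usage sketch for save_available_ids, assuming PyTables is imported as `tb`
# (as in the function above); the file name 'ids.h5' is illustrative.
import numpy as np
import tables as tb

with tb.open_file('ids.h5', mode='a') as h5f:
    save_available_ids(h5f, np.array([101, 102, 105], dtype=np.int64))
    available_ids = h5f.root.available_ids.read()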
def writeWeights(self, poly_power=2):
    """ Write the calculated flat cal factors to an h5 file """
    if not os.path.exists(self.out_directory):
        os.makedirs(self.out_directory)
    try:
        flatCalFile = tables.open_file(self.flatCalFileName, mode='w')
    except IOError:
        getLogger(__name__).error("Couldn't create flat cal file: {}".format(self.flatCalFileName))
        return
    header = flatCalFile.create_group(flatCalFile.root, 'header', 'Calibration information')
    tables.Array(header, 'beamMap', obj=self.beamImage.residmap)
    tables.Array(header, 'xpix', obj=self.xpix)
    tables.Array(header, 'ypix', obj=self.ypix)
    calgroup = flatCalFile.create_group(flatCalFile.root, 'flatcal',
                                        'Table of flat calibration weights by pixel and wavelength')
    tables.Array(calgroup, 'weights', obj=self.flatWeights.data,
                 title='Flat calibration Weights indexed by pixelRow,pixelCol,wavelengthBin')
    tables.Array(calgroup, 'spectrum', obj=self.countCubesToSave.data,
                 title='Twilight spectrum indexed by pixelRow,pixelCol,wavelengthBin')
    tables.Array(calgroup, 'flags', obj=self.flatFlags,
                 title='Flat cal flags indexed by pixelRow,pixelCol,wavelengthBin. 0 is Good')
    # TODO: this is a misnomer; these are not bins but the wavelengths of the calibration
    tables.Array(calgroup, 'wavelengthBins', obj=self.wavelengths,
                 title='Wavelength bin edges corresponding to third dimension of weights array')
    descriptionDict = FlatCalSoln_Description(nWvlBins=len(self.wavelengths), max_power=poly_power)
    caltable = flatCalFile.create_table(calgroup, 'calsoln', descriptionDict,
                                        title='Flat Cal Table', expectedrows=self.xpix * self.ypix)
    for iRow in range(self.xpix):
        for iCol in range(self.ypix):
            entry = caltable.row
            entry['resid'] = self.beamImage.residmap[iRow, iCol]
            entry['y'] = iRow
            entry['x'] = iCol
            entry['weight'] = self.flatWeights.data[iRow, iCol, :]
            entry['err'] = self.flatWeightErr[iRow, iCol, :]
            fittable = (entry['weight'] != 0) & np.isfinite(entry['weight'] + entry['err'])
            if fittable.sum() < poly_power + 1:
                entry['bad'] = True
            else:
                entry['coeff'] = np.polyfit(self.wavelengths[fittable], entry['weight'][fittable],
                                            poly_power, w=1 / entry['err'][fittable] ** 2)
                entry['bad'] = self.flatFlags[iRow, iCol, :].any()
            entry['spectrum'] = self.countCubesToSave.data[iRow, iCol, :]
            entry.append()
    flatCalFile.close()
    getLogger(__name__).info("Wrote to {}".format(self.flatCalFileName))
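
# A minimal sketch of using a stored calsoln row from the file written above, assuming
# the column names written there ('resid', 'coeff', 'bad'); np.polyfit coefficients are
# ordered highest power first, so np.polyval evaluates them directly. The helper name is
# illustrative, not part of the original class.
import numpy as np
import tables

def weightAtWavelength(flatCalFileName, resid, wavelength):
    """Evaluate the fitted flat-weight polynomial for one resonator at one wavelength."""
    with tables.open_file(flatCalFileName, mode='r') as f:
        soln = f.root.flatcal.calsoln.read()
    row = soln[soln['resid'] == resid][0]  # assumes the resid is present in the table
    if row['bad']:
        return np.nan
    return np.polyval(row['coeff'], wavelength)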
def genphotonlist2D(Ic, Is, Ir, Ttot, tau, out_directory, deadtime=0,
                    interpmethod='cubic', taufac=500, diffrac_lim=2.86):
    """
    Same functionality as genphotonlist, except this function is intended to
    iterate over an entire 2D (Ic, Is) map. It makes sure the intensities
    generated from corrsequence are correlated spatially (they will be
    correlated temporally).
    """
    # Generate a correlated Gaussian sequence, correlation time tau.
    # Then transform this to a random variable uniformly distributed
    # between 0 and 1, and finally back to a modified Rician random
    # variable. This method ensures that: (1) the output is M-R
    # distributed, and (2) it is exponentially correlated. Finally,
    # return a list of photons determined by the probability of each
    # unit of time giving a detected photon.

    # Number of microseconds per bin in which we discretize intensity
    N = max(int(tau * 1e6 / taufac), 1)
    t_base, N_base = corrsequence(int(Ttot * 1e6 / N), tau * 1e6 / N)
    x_size, y_size = Ic.shape
    t_size = len(t_base)
    intensity_cube = np.zeros([x_size, y_size, t_size])

    for x_pix in range(x_size):
        for y_pix in range(y_size):
            if Is[x_pix, y_pix] > 0.0 and Ic[x_pix, y_pix] > 0.0 and \
                    Is[x_pix, y_pix] > 1e-8 * Ic[x_pix, y_pix]:
                t, normal = corrsequence(int(Ttot * 1e6 / N), tau * 1e6 / N)
                intensity_cube[x_pix, y_pix, :] = normal
            elif Is[x_pix, y_pix] >= 0.0 and Ic[x_pix, y_pix] >= 0.0:
                # N = max(N, 1000)
                t = np.arange(int(Ttot * 1e6))
                intensity_cube[x_pix, y_pix, :] = Ic[x_pix, y_pix] / 1e6 * np.ones(t_size)
            else:
                intensity_cube[x_pix, y_pix, :] = np.full_like(intensity_cube[x_pix, y_pix, :], np.nan)

    intensity_cube_uncorr = np.copy(intensity_cube)

    # Spatially correlate the intensities by smoothing each time slice with the
    # diffraction-limited kernel.
    for timestamp in range(t_size):
        intensity_cube[:, :, timestamp] = diffrac_lim_kernel(intensity_cube[:, :, timestamp],
                                                             diffrac_lim=diffrac_lim,
                                                             nan_treatment='interpolate',
                                                             preserve_nan=True)
    intensity_cube_corr = np.copy(intensity_cube)

    photon_table = tables.open_file(out_directory, mode='w')
    Header = photon_table.create_group(photon_table.root, 'header', 'header')
    header = photon_table.create_table(Header, 'header', ObsHeader, title='Header')
    beamFlag = np.zeros([x_size, y_size])
    beamMap = np.zeros([x_size, y_size])
    BeamMap = photon_table.create_group(photon_table.root, 'BeamMap', 'BeamMap')
    tables.Array(BeamMap, 'Flag', obj=beamFlag)
    tables.Array(BeamMap, 'Map', obj=beamMap)
    Images = photon_table.create_group(photon_table.root, 'Images', 'Images')
    Photons = photon_table.create_group(photon_table.root, 'Photons', 'Photons')
    PhotonTable = photon_table.create_table(Photons, 'PhotonTable', ObsFileCols, title='Photon Data')

    head = header.row
    head['beammapFile'] = ''
    head['dataDir'] = ''
    head['energyBinWidth'] = 0.1
    head['expTime'] = 0.0
    head['isFlatCalibrated'] = False
    head['isLinearityCorrected'] = False
    head['isPhaseNoiseCorrected'] = False
    head['isPhotonTailCorrected'] = False
    head['isFluxCalibrated'] = False
    head['isWvlCalibrated'] = False
    head['startTime'] = 0
    head['target'] = 'CHARIS'
    head['timeMaskExists'] = False
    head['wvlBinStart'] = 700.0
    head['wvlBinEnd'] = 700.0
    head['wvlCalFile'] = ''
    head.append()

    photon = PhotonTable.row
    iteration = 0
    t *= N
    for x_pix in range(x_size):
        for y_pix in range(y_size):
            print(x_pix, y_pix)
            if Is[x_pix, y_pix] > 0.0 and Ic[x_pix, y_pix] > 0.0 and \
                    Is[x_pix, y_pix] > 1e-8 * Ic[x_pix, y_pix]:
                normal = intensity_cube[x_pix, y_pix, :]
                uniform = 0.5 * (special.erf(normal / np.sqrt(2)) + 1)
                f = MRicdf(Ic[x_pix, y_pix], Is[x_pix, y_pix], interpmethod=interpmethod)
                I = f(uniform) / 1e6
                intensity_cube[x_pix, y_pix, :] = I
            # Number of photons from each distribution in each time bin
            I = intensity_cube[x_pix, y_pix, :]
            n1 = np.random.poisson(I * N)
            n2 = np.random.poisson(np.ones(t.shape) * Ir / 1e6 * N)

            # Go ahead and make the list with repeated times
            tlist = t[np.where(n1 > 0)]
            tlist_r = t[np.where(n2 > 0)]
            for i in range(1, max(np.amax(n1), np.amax(n2)) + 1):
                tlist = np.concatenate((tlist, t[np.where(n1 > i)]))
                tlist_r = np.concatenate((tlist_r, t[np.where(n2 > i)]))
            tlist_tot = np.concatenate((tlist, tlist_r)) * 1.

            # Add a random number to give the exact arrival time within the bin
            tlist_tot += N * np.random.rand(len(tlist_tot))

            # Cython is much, much faster given that this has to be an
            # explicit for loop; without Cython (even with numba) this step
            # would dominate the run time. Returns indices of the times we
            # keep.
            indx = np.argsort(tlist_tot)
            keep = utils.removedeadtime(tlist_tot[indx], deadtime)
            final_tlist = tlist_tot[indx][np.where(keep)]

            for Time in final_tlist:
                photon['ResID'] = float(iteration)
                photon['Wavelength'] = 700.0
                photon['SpecWeight'] = 1.0
                photon['NoiseWeight'] = 1.0
                photon['Time'] = Time
                photon.append()
            iteration += 1

    photon_table.flush()
    photon_table.close()

    intensity_cube_MR = np.copy(intensity_cube)
    return (intensity_cube_uncorr, intensity_cube_corr, intensity_cube_MR)
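
# A minimal read-back sketch for the photon list written by genphotonlist2D, assuming
# the node path /Photons/PhotonTable and the column names used above ('ResID', 'Time');
# the helper name is illustrative, not part of the original module.
import numpy as np
import tables

def photon_times_for_pixel(out_file, resid):
    """Return the sorted arrival times (microseconds) for one simulated pixel's ResID."""
    with tables.open_file(out_file, mode='r') as f:
        table = f.root.Photons.PhotonTable
        times = table.read_where('ResID == r', condvars={'r': float(resid)})['Time']
    return np.sort(times)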
def writeWeights(self): """ Writes an h5 file to put calculated flat cal factors in """ if os.path.isabs(self.flatCalFileName) == True: fullFlatCalFileName = self.flatCalFileName baseh5path = fullFlatCalFileName.split('.h5') fullFlatCalFileName = baseh5path[0] + str(self.indexweights + 1) + '.h5' else: scratchDir = os.getenv('MKID_PROC_PATH') flatDir = os.path.join(scratchDir, 'flatCalSolnFiles') fullFlatCalFileName = os.path.join(flatDir, self.flatCalFileName) baseh5path = fullFlatCalFileName.split('.h5') fullFlatCalFileName = baseh5path[0] + str(self.indexweights + 1) + '.h5' if not os.path.exists(fullFlatCalFileName) and self.calSolnPath == '': os.makedirs(fullFlatCalFileName) try: flatCalFile = tables.open_file(fullFlatCalFileName, mode='w') except: print('Error: Couldn\'t create flat cal file, ', fullFlatCalFileName) return header = flatCalFile.create_group(flatCalFile.root, 'header', 'Calibration information') tables.Array(header, 'beamMap', obj=self.beamImage) tables.Array(header, 'nxpix', obj=self.nxpix) tables.Array(header, 'nypix', obj=self.nypix) calgroup = flatCalFile.create_group(flatCalFile.root, 'flatcal', 'Table of flat calibration weights by pixel and wavelength') tables.Array(calgroup, 'weights', obj=self.flatWeights.data, title='Flat calibration Weights indexed by pixelRow,pixelCol,wavelengthBin') tables.Array(calgroup, 'spectrum', obj=self.countCubesToSave.data, title='Twilight spectrum indexed by pixelRow,pixelCol,wavelengthBin') tables.Array(calgroup, 'flags', obj=self.flatFlags, title='Flat cal flags indexed by pixelRow,pixelCol,wavelengthBin. 0 is Good') tables.Array(calgroup, 'wavelengthBins', obj=self.wvlBinEdges, title='Wavelength bin edges corresponding to third dimension of weights array') descriptionDict = FlatCalSoln_Description(self.nWvlBins) caltable = flatCalFile.create_table(calgroup, 'calsoln', descriptionDict, title='Flat Cal Table') for iRow in range(self.nxpix): for iCol in range(self.nypix): weights = self.flatWeights[iRow, iCol, :] spectrum = self.countCubesToSave[iRow, iCol, :] deltaWeights = self.deltaFlatWeights[iRow, iCol, :] flags = self.flatFlags[iRow, iCol, :] flag = np.any(self.flatFlags[iRow, iCol, :]) pixelName = self.beamImage[iRow, iCol] entry = caltable.row entry['resid'] = pixelName entry['pixelrow'] = iRow entry['pixelcol'] = iCol entry['weights'] = weights entry['weightUncertainties'] = deltaWeights entry['spectrum'] = spectrum entry['weightFlags'] = flags entry['flag'] = flag entry.append() flatCalFile.flush() flatCalFile.close() # close progress bar if self.verbose: self.pbar.finish() if self.verbose: print('wrote to', fullFlatCalFileName)