def processImage(self, imgdata):
		stackedname = os.path.join(self.params['rundir'], imgdata['filename']+"power.jpg")

		if os.path.isfile(stackedname) and apFile.fileSize(stackedname) > 100:
			return

		### make the power spectra
		powerspectra = imagefun.power(imgdata['image'], mask_radius=1.0, thresh=4)
		binpowerspectra = imagefun.bin2(powerspectra, self.params['bin'])
		del powerspectra
		if self.params['hp'] is True:
			binpowerspectra = apImage.fermiHighPassFilter(binpowerspectra, apix=4.0, radius=2000.0)
		binpowerspectra = apImage.normStdevMed(binpowerspectra, size=5) 
		binpowerspectra = apImage.pixelLimitFilter(binpowerspectra, pixlimit=4)
		binpowerspectra = apImage.normRange(binpowerspectra)

		### filter the image
		imagedata = imagefun.bin2(imgdata['image'], self.params['bin'])
		del imgdata['image']
		imagedata = apImage.normStdevMed(imagedata, size=5) 
		imagedata = apImage.pixelLimitFilter(imagedata, pixlimit=2)
		imagedata = apImage.normRange(imagedata)

		### write to file
		stacked = numpy.hstack([imagedata, binpowerspectra])
		del imagedata, binpowerspectra
		apImage.arrayToJpeg(stacked, stackedname)
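
For orientation, the montage that processImage writes can be sketched with plain numpy alone: a centered log power spectrum next to the image, both brought to a common 0-255 range and joined with hstack. This is only an illustration of the idea; imagefun.power and the apImage filters above do considerably more (masking, thresholding, binning, median/stdev normalization), and make_power_montage is a hypothetical name.

import numpy

def make_power_montage(image):
	"""Minimal sketch: log power spectrum placed next to the image, both scaled to 0-255."""
	# centered log power spectrum of the image
	fft = numpy.fft.fftshift(numpy.fft.fft2(image))
	power = numpy.log(numpy.abs(fft) + 1.0)

	def norm_range(array):
		# rescale an array onto the 0-255 range
		array = array - array.min()
		return 255.0 * array / array.max()

	# place the image and its power spectrum side by side, as processImage does
	return numpy.hstack([norm_range(image.astype(numpy.float64)), norm_range(power)])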
Example #3
	def numpyToWxImage(self, array):
		clip = self.contrasttool.getRange()
		normarray = array.astype(numpy.float32)
		if normarray.shape[0] == normarray.shape[1] and normarray.shape[0] >= 4096:
			normarray = imagefun.bin2(normarray, 4)
		wximage = wx.EmptyImage(normarray.shape[1], normarray.shape[0])
		normarray = normarray.clip(min=clip[0], max=clip[1])
		normarray = (normarray - clip[0]) / (clip[1] - clip[0]) * 255.0
		if self.colormap is None:
			normarray = normarray.astype(numpy.uint8)
			h, w = normarray.shape[:2]
			imagedata = Image.fromstring("L", (w, h), normarray.tostring())
		else:
			valarray = normarray*6.0
			valarray = valarray.astype(numpy.uint16)
			remapColor = numpy.array(self.colormap)
			rgbarray = remapColor[valarray].astype(numpy.uint8)
			print rgbarray[:,:,0]
			h, w = normarray.shape[:2]
			r = Image.fromstring("L", (w, h), rgbarray[:,:,0].tostring())
			g = Image.fromstring("L", (w, h), rgbarray[:,:,1].tostring())
			b = Image.fromstring("L", (w, h), rgbarray[:,:,2].tostring())
			imagedata = Image.merge("RGB", (r,g,b))
		wximage.SetData(imagedata.convert('RGB').tostring())
		return wximage
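
The colormap branch above is ordinary numpy fancy indexing: scaled pixel intensities become row indices into an (N, 3) color lookup table. A minimal standalone version of that remapping, assuming (as the *6.0 scaling suggests) a table with at least 6*255+1 rows; apply_colormap is a hypothetical name:

import numpy

def apply_colormap(gray255, lut):
	"""Map a float image in [0, 255] through an (N, 3) color lookup table."""
	# scale intensities onto the table and clamp to valid row indices
	idx = (gray255 * ((len(lut) - 1) / 255.0)).astype(numpy.uint16)
	idx = idx.clip(0, len(lut) - 1)
	# fancy indexing yields an (h, w, 3) RGB array, one table row per pixel
	return lut[idx].astype(numpy.uint8)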
Example #5
def padpower(image, pixelsize, fieldsize=None, mask_radius=0.5):
	"""
	computes power spectra of image using padding
	"""
	t0 = time.time()
	if fieldsize is None:
		fieldsize = getFieldSize(image.shape)	
	maxDim = max(image.shape)
	powerTwo = math.ceil(math.log(maxDim)/math.log(2.0))
	powerTwoDim = int(2**(powerTwo))
	squareImage = imagefilter.frame_constant(image, (powerTwoDim,powerTwoDim))
	envelop = twodHann(powerTwoDim)
	poweravg = imagefun.power(squareImage*envelop, mask_radius)
	binning = int(powerTwoDim/fieldsize)
	### bin the padded power spectrum down to the requested field size
	poweravg = imagefun.bin2(poweravg, binning)
	freq = 1.0/(poweravg.shape[0]*pixelsize)
	apDisplay.printMsg("Fast compute PSD with size %d -> %d complete in %s"
		%(powerTwoDim, fieldsize, apDisplay.timeString(time.time()-t0)))
	return poweravg, freq
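
twodHann is called above but not defined in this excerpt; the usual construction is a separable 2-D Hann window, and a minimal numpy sketch under that assumption is:

import numpy

def twod_hann_sketch(size):
	"""Separable 2-D Hann window (an assumption about what twodHann returns)."""
	window = numpy.hanning(size)
	return numpy.outer(window, window)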
	def mergeImageStackIntoBigStack(self, imgstackfile, imgdata):
		t0 = time.time()
		apDisplay.printMsg("filtering particles and adding to stack")
		# if applying a boxmask, write to a temporary file before adding to main stack
		bigimgstack = os.path.join(self.params['rundir'], self.params['single'])
		if self.params['boxmask'] is not None:
			bigimgstack = os.path.splitext(imgstackfile)[0]+"-premask.hed"
		### here is the craziness
		### step 1: read imgstackfile into memory
		imgstackmemmap = imagic.read(imgstackfile)
		### when only one particle is read, imagic.read returns a 2D array instead of a 3D array
		if len(imgstackmemmap.shape) < 3:
			imgstackmemmap = imgstackmemmap.reshape(1, imgstackmemmap.shape[0], imgstackmemmap.shape[1])
		if self.params['debug'] is True:
			print "imgstackmemmap.shape", imgstackmemmap.shape
		apix = self.params['apix'] #apDatabase.getPixelSize(imgdata)

		boxshape = (self.boxsize, self.boxsize)
		processedParticles = []
		for particle in imgstackmemmap:

			### step 2: filter particles
			### high / low pass filtering
			#if self.params['pixlimit']:
			#	particle = imagefilter.pixelLimitFilter(particle, self.params['pixlimit'])
			if self.params['lowpass']:
				particle = imagefilter.lowPassFilter(particle, apix=apix, radius=self.params['lowpass'])
			if self.params['highpass']:
				particle = imagefilter.highPassFilter2(particle, self.params['highpass'], apix=apix)
			### unless specified otherwise, invert the images
			if self.params['inverted'] is True:
				particle = -1.0 * particle
			if particle.shape != boxshape:
				if self.boxsize <= particle.shape[0] and self.boxsize <= particle.shape[1]:
					particle = imagefilter.frame_cut(particle, boxshape)
				else:
					apDisplay.printError("particle shape (%dx%d) is smaller than boxsize (%d)"
						%(particle.shape[0], particle.shape[1], self.boxsize))

			### step 3: normalize particles
			#self.normoptions = ('none', 'boxnorm', 'edgenorm', 'rampnorm', 'parabolic') #normalizemethod
			if self.params['normalizemethod'] == 'boxnorm':
				particle = imagenorm.normStdev(particle)
			elif self.params['normalizemethod'] == 'edgenorm':
				particle = imagenorm.edgeNorm(particle)
			elif self.params['normalizemethod'] == 'rampnorm':
				particle = imagenorm.rampNorm(particle)
			elif self.params['normalizemethod'] == 'parabolic':
				particle = imagenorm.parabolicNorm(particle)

			### step 4: decimate/bin particles if specified
			### binning is last, so we maintain most detail and do not have to deal with binned apix
			if self.params['bin'] > 1:
				particle = imagefun.bin2(particle, self.params['bin'])

			#from scipy.misc import toimage
			#toimage(particle).show()

			processedParticles.append(particle)

		### step 5: merge particle list with larger stack
		apImagicFile.appendParticleListToStackFile(processedParticles, bigimgstack,
			msg=self.params['debug'])

		#remove original image stack from memory
		del imgstackmemmap
		del processedParticles

		t0 = time.time()
		# if applying boxmask, now mask the particles & append to stack
		if self.params['boxmask'] is not None:
			# normalize particles before boxing, since zeros in mask
			# can affect subsequent processing if not properly normalized
			apEMAN.executeEmanCmd("proc2d %s %s edgenorm inplace"%(bigimgstack,bigimgstack),showcmd=False)
			imgstack = apImagicFile.readImagic(bigimgstack, msg=False)
			maskstack = apImagicFile.readImagic(self.params['boxmaskf'],msg=False)
			for i in range(len(imgstack['images'])):
				imgstack['images'][i]*=maskstack['images'][i]
			maskedpartstack = os.path.splitext(imgstackfile)[0]+"-aftermask.hed"
			apImagicFile.writeImagic(imgstack['images'], maskedpartstack)
			bigimgstack = os.path.join(self.params['rundir'], self.params['single'])
			apEMAN.executeEmanCmd("proc2d %s %s flip"%(maskedpartstack,bigimgstack))

		### count particles
		bigcount = apFile.numImagesInStack(bigimgstack, self.boxsize/self.params['bin'])
		imgcount = apFile.numImagesInStack(imgstackfile, self.boxsize)

		### append to particle log file
		partlogfile = os.path.join(self.params['rundir'], self.timestamp+"-particles.info")
		f = open(partlogfile, 'a')
		for i in range(imgcount):
			partnum = self.particleNumber + i + 1
			line = str(partnum)+'\t'+os.path.join(imgdata['session']['image path'], imgdata['filename']+".mrc")
			f.write(line+"\n")
		f.close()

		self.mergestacktimes.append(time.time()-t0)

		return bigcount
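
Steps 2-4 above form a per-particle pipeline: optional filtering and inversion, crop to the box, normalize, then bin. The sketch below covers only the normalize-and-bin tail with plain numpy; the edge normalization is a simplified stand-in for imagenorm.edgeNorm and imagefun.bin2, not their actual implementations.

import numpy

def edge_norm_and_bin(particle, binfactor=1):
	"""Normalize by the mean/stdev of the outermost pixel ring, then block-average bin."""
	edge = numpy.concatenate([particle[0, :], particle[-1, :],
		particle[1:-1, 0], particle[1:-1, -1]])
	normed = (particle - edge.mean()) / edge.std()
	if binfactor > 1:
		# crop to a multiple of the bin factor, then average binfactor x binfactor blocks
		h = normed.shape[0] - normed.shape[0] % binfactor
		w = normed.shape[1] - normed.shape[1] % binfactor
		normed = normed[:h, :w].reshape(h // binfactor, binfactor,
			w // binfactor, binfactor).mean(axis=3).mean(axis=1)
	return normed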
Example #7
        print image.shape
        print image.dtype
        print abs(image.mean()), image.std()
        print image.min(), image.max()
        print "============="

#===========
if __name__ == "__main__":
        bin = 1
        ### read image 1
        #a = mrc.read("/home/vossman/appion/lib/test01.mrc")
        #a = mrc.read("/home/vossman/appion/lib/waylon2.mrc")
        #a = mrc.read("/home/vossman/leginon/holetemplate.mrc")
        a = mrc.read("/ami/data00/appion/09mar04b/align/kerden11/09apr13q11.7.mrc")
        a = normImage(a)
        a = imagefun.bin2(a, bin)
        #printImageInfo(a)

        ### read image 2
        b = mrc.read("/ami/data00/appion/09mar04b/align/kerden11/09apr13q11.8.mrc")
        #b = mrc.read("/home/vossman/appion/lib/test02.mrc")
        #b = mrc.read("/home/vossman/leginon/holetemplate2.mrc")
        #b = mrc.read("/home/vossman/appion/lib/pickwei2.mrc")
        b = normImage(b)
        b = imagefun.bin2(b, bin)

        #spectralSNR([a, b])
        #fourierRingCorrelation(a, b)

        #sys.exit(1)
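
The print lines at the top of this example are the tail of an image-statistics helper, and normImage is called above without being shown. Hedged sketches consistent with this fragment (the zero-mean, unit-stdev choice for normImage is an assumption; the printImageInfo body simply mirrors the fields printed above):

def normImage(image):
	# assumed behavior: zero-mean, unit-standard-deviation normalization
	return (image - image.mean()) / image.std()

def printImageInfo(image):
	# mirrors the fields printed in the fragment above
	print image.shape
	print image.dtype
	print abs(image.mean()), image.std()
	print image.min(), image.max()
	print "============="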
Example #9
	def start(self):

		### Works
		# read from MRC image/stack
		# read from HED/IMG stack
		# write to MRC image
		# write to MRC stack		
		# write to HED/IMG stack
		# append to MRC stack
		# append to HED/IMG stack
		# filter images
		# implement binning
		# implement normalization

		### needs more testing
		# write pixelsize to new MRC file
		# get apix from MRC header
		# implement proc2d --average
		# implement proc2d --list feature

		### TODO
		# read from SPIDER stack
		# read from SPIDER image
		# read from EMAN2/HDF stack		
		# write to SPIDER image
		# write to SPIDER stack
		# write to EMAN2/HDF stack
		# get apix from HED/IMG header
		# implement proc2d --rotavg
		# implement proc2d --clip

		# determine numParticles to add
		if self.params['last'] is None:
			self.params['last'] = self.inputNumParticles - 1 #stacks start at 0
		elif self.params['last'] > self.inputNumParticles:
			apDisplay.printWarning("Last particle requested (%d) is larger than available particles (%d)"
				%(self.params['last'], self.inputNumParticles))
			self.params['last'] = self.inputNumParticles - 1 #stacks start at 0
		addNumParticles = self.params['last'] - self.params['first'] + 1

		### prepare for an existing file
		existNumParticles = self.outFileStatus()
		self.totalParticles = existNumParticles + addNumParticles

		indata = self.readFileData(self.params['infile'])

		# it is more efficient to process X particles at a time and write them to
		# disk in one batch rather than writing each particle to disk separately.
		# particles are read using a memory map (numpy.memmap), so we can pretend
		# to read them all into memory at once
		particlesPerCycle = self.getParticlesPerCycle(self.params['infile'])

		if self.params['average'] is True:
			summedParticle = numpy.zeros((self.boxsize, self.boxsize))

		processedParticles = []
		if self.params['list']:
			partlist = self.readKeepList()
		else:
			partlist = range(self.params['first'], self.params['first']+addNumParticles)
		count = 0
		for partnum in partlist:
			count += 1
			particle = indata[partnum]
			if self.params['debug'] is True:
				print "---------"
				print "Particle Number: %d of %d"%(partnum, addNumParticles)
			if self.params['pixlimit']:
				self.message("pixlimit: %s"%(self.params['pixlimit']))
				particle = imagefilter.pixelLimitFilter(particle, self.params['pixlimit'])
			if self.params['lowpass']:
				self.message("lowpass: %s"%(self.params['lowpass']))
				particle = imagefilter.lowPassFilter(particle, apix=self.params['apix'], radius=self.params['lowpass'])
			if self.params['highpass']:
				self.message("highpass: %s"%(self.params['highpass']))
				particle = imagefilter.highPassFilter2(particle, self.params['highpass'], apix=self.params['apix'])
			### unless specified otherwise, invert the images
			if self.params['inverted'] is True:
				self.message("inverted: %s"%(self.params['inverted']))
				particle = -1.0 * particle
			### clipping
			"""
			if particle.shape != boxshape:
				if boxsize <= particle.shape[0] and boxsize <= particle.shape[1]:
					particle = imagefilter.frame_cut(particle, boxshape)
				else:
					apDisplay.printError("particle shape (%dx%d) is smaller than boxsize (%d)"
						%(particle.shape[0], particle.shape[1], boxsize))
			"""

			### step 3: normalize particles
			#self.normoptions = ('none', 'boxnorm', 'edgenorm', 'rampnorm', 'parabolic') #normalizemethod
			self.message("normalize method: %s"%(self.params['normalizemethod']))
			if self.params['normalizemethod'] == 'boxnorm':
				particle = imagenorm.normStdev(particle)
			elif self.params['normalizemethod'] == 'edgenorm':
				particle = imagenorm.edgeNorm(particle)
			elif self.params['normalizemethod'] == 'rampnorm':
				particle = imagenorm.rampNorm(particle)
			elif self.params['normalizemethod'] == 'parabolic':
				particle = imagenorm.parabolicNorm(particle)

			### step 4: decimate/bin particles if specified
			### binning is last, so we maintain most detail and do not have to deal with binned apix
			if self.params['bin'] > 1:
				particle = imagefun.bin2(particle, self.params['bin'])

			### working above this line
			if self.params['average'] is True:
				summedParticle += particle
			else:
				processedParticles.append(particle)

			if len(processedParticles) == particlesPerCycle:
				### step 5: merge particle list with larger stack
				self.appendParticleListToStackFile(processedParticles, self.params['outfile'])
				sys.stderr.write("%d of %d\n"%(count, len(partlist)))
				processedParticles = []
		if len(processedParticles) > 0:
			self.appendParticleListToStackFile(processedParticles, self.params['outfile'])
		if self.params['average'] is True:
			avgParticle = summedParticle / count
			self.appendParticleListToStackFile([avgParticle,], self.params['outfile'])
		print "Wrote %d particles to file "%(self.particlesWritten)
Example #10
			%(fieldsize, count, apDisplay.timeString(time.time()-t0)))
	return poweravg, freq

#===================
#===================
#===================
if __name__ == "__main__":
	a = mrc.read("/home/vosslab/test.mrc")
	a = imagefilter.planeRegression(a)
	fullpower = imagefun.power(a)
	#imagestat.printImageInfo(a)
	t0 = time.time()
	x = numpy.arange(6, 13)
	N = 2**x
	print N
	for n in N:
		print "====================================="
		b = power(a, n)
		b = imagefilter.frame_cut(b, numpy.array(b.shape)/2)
		imagefile.arrayToPng(b, "%04d-field.png"%(n))
		imagestat.printImageInfo(b)

		bin = int(round(2**12/n))
		b = imagefun.bin2(fullpower, bin)
		b = imagefilter.frame_cut(b, numpy.array(b.shape)/2)
		imagefile.arrayToPng(b, "%04d-binned.png"%(n))
		imagestat.printImageInfo(b)

	print "complete in %s"%(apDisplay.timeString(time.time()-t0))
	#imagestat.printImageInfo(b)
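
imagefilter.frame_cut is used here to keep the central region of each power spectrum; a minimal centered-crop sketch under that assumption (frame_cut_sketch is a hypothetical name, and the real frame_cut may handle edge cases differently):

def frame_cut_sketch(image, shape):
	"""Return the centered window of the requested shape (assumed frame_cut behavior)."""
	r0 = (image.shape[0] - shape[0]) // 2
	c0 = (image.shape[1] - shape[1]) // 2
	return image[r0:r0 + shape[0], c0:c0 + shape[1]]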