Example #1
def writeFile(f,fname='TestMe',sps=None,start=0):
	ndoc = nmpml.blankDocument()
	if sps:
		tf = mdat.newData(f, {'Name':fname, 'SampleType':'timeseries', 'SamplesPerSecond':sps, "StartTime":start}) 
	else:			
		tf = mdat.newData(f, {'Name':fname, 'SampleType':'function'}) 
	ndoc.newElement(tf)
	io.write(ndoc, fname+".mdat")		
def tffmax(ds, usamp=True):
	ftt.scanForTFValues(ds, window=.2, minFreq=4.9, maxFreq = 250.0)
	tfv = ds.getSubData('/ftvals')
	tf = _cleanupTFV(tfv.getData())
	if tf.shape[0]==0:
		print("No values in fmax tf")
		return None
	if usamp:
		sv = tf[:,0].min()
		tf = uniformsample(tf, 1.0)
		dat = mdat.newData(tf, {'SampleType':'timeseries', 'SamplesPerSecond':1.0, "StartTime":sv})
	else:
		dat = mdat.newData(tf, {'SampleType':'function'})
	return dat
Example #3
def getSinTF(dname):
	dp = getDataPairs(dname)
	for exp in dp:
		dpl = dp[exp]
		print "Analyzing Sin Data for experiment %s" % exp
		dpl = [p for p in dpl if p[0][10]=='S' and not p[0][11]=='T']
		ds = assemble(dpl, dname)
		dat = ds.getData()
		dat[:,1] = cal._applyfilter(dat[:,1], MFF)
		# dat[:,0] = smooth(dat[:,0]) #This now occurs in assemble BEFORE resampling
		ds.datinit(dat, {"SampleType":"timeseries", "StartTime":0.0, 
			"Labels":["HairPosition","MicroFlownVelocity"], "SamplesPerSecond":SIGFS})
		nfn = '%s_compositSinData.mdat' % exp
		io.write(ds, nfn, newdoc=True)
		tfds = ctf.tffmax(ds, False)
		if not tfds:
			continue
		tfds.data[:,2]+=pi
		tfds.data[:,2] = smoothphase(tfds.data[:,2])
		io.write(tfds, "%s_SinTF_Function.mdat" % exp, newdoc=True)
		tf = tfds.getData()
		tf = row_stack( [array([[0,0,0]]), tf, array([[250,tf[-1,1],tf[-1,2]]])])
		tf = ctf.uniformsample(tf, 1.0)
		tfds = miendata.newData(tf, {'Name':exp+"SinTF", 'SampleType':'timeseries', 'SamplesPerSecond':1.0, "StartTime":0})
		io.write(tfds, "%s_SinTF_ResampledTimeseries.mdat" % exp, newdoc=True) 
Example #4
def assemble(filepairs, dname):
	dat_all = []
	for fp in filepairs:
		bfn = os.path.join(dname, fp[0])
		afn = os.path.join(dname, fp[1])
		dbin = io.read(bfn).getElements("Data")[0]
		daos = io.read(afn).getElements("Data")[0]
		trig= dbin.getData()[:,TRIGCHAN]
		ind = argmax(trig[1:] - trig[:-1])+1
		mfd = dbin.getData()[ind:,MFCHAN]
		dbin.datinit(mfd, {"SampleType":"timeseries", "StartTime":0.0, 
			"Labels":["MicroFlownVoltage"], "SamplesPerSecond":dbin.fs()})
		tsd = smooth(daos.getData(),daos.attrib("SamplesPerSecond"))  #remove clicks BEFORE resampling
		daos.datinit(tsd, {"SampleType":"timeseries", "StartTime":0.0, 
			"Labels":["HairPosition"], "SamplesPerSecond":daos.fs()})
		resample(dbin, SIGFS)
		resample(daos, SIGFS)
		dat2 = dbin.getData()
		dat1 = daos.getData()
		if dat1.shape[0] < dat2.shape[0]:
			dat2 = dat2[:dat1.shape[0]]
		elif dat1.shape[0] > dat2.shape[0]:
			dat1 = dat1[:dat2.shape[0]]
		dat1 -=  dat1.mean()
		dat2 -= dat2.mean()
		dd = column_stack([dat1, dat2])
		dat_all.append(dd)
	dat = row_stack(dat_all)
	ds = miendata.newData(dat, {'SampleType':'timeseries', 'SamplesPerSecond':SIGFS,
		'StartTime':0.0, "Labels":['HairPosition', 'MicroFlownVoltage']})
	return ds
Example #5
	def makeMasks(self, event):
		#updated
		c = self.cell
		points = reshape(c.get_drawing_coords(), (-1, 8))
		diams = (points[:,3]+points[:,7])/2
		#filt = diams < 8.5
		#get abstract model data file
		d = self.gui.askParam([{"Name":"Density Model File",
 							 "Value":'classModelsOnly.mien',
 							 "Browser":FileBrowse}])
		if not d:
			return
		md = io.read(d[0])	
		masks = md.getElements("AbstractModel", depth=1)
		#for each model, construct masks 
		for mask in masks:
			gmm = mask.getElements('MienBlock', {'Function':'ccbcv.gmm.gmm'})
			if not gmm:
				continue
			gmm = gmm[0]
			dat = maskPoints(mask, points)
			#dat = dat*filt[:,newaxis]*mask.attrib('total_weight')
			dat = dat*mask.attrib('total_weight')
			a=  {'Name':mask.name().rstrip('gm'),'SampleType':'mask'}
			for atr in mask.attributes:
				if atr.startswith('meta_'):
					a[atr] = mask.attrib(atr)
			d = newData(dat,  a)
			c.newElement(d)
		self.gui.update_all(object=c, event="Rebuild")
		self.gui.report("finished masking")
Example #6
def procFile(fn, rot, nfn):
	f = open(fn, 'rb')
	h = getInfo(f)
	nframes=h['nframes']
	print "converting %s (%i frames)" % (fn, nframes)
	if nframes<1000:
		frames = arange(nframes)
	else:
		frames = None
	dat = chunkOFrames(fn, frames)
	me = dat.mean(3)[...,0]
	out = []
	for i in range(nframes):
		frame = readFrameN(f, h, i) - me
		if rot is not None:
			frame, junk = imrotate(frame, 1, rot)
		x = frame.sum(1)
		if options['w']>1:
			x=convolve(ones(options['w']), x, mode='same')
		out.append(argmax(x))
		if not i%500:
			print "... frame %i" % i
	out = array(out, float32)
	dat = mdat.newData(out, {'SampleType':'timeseries', 'SamplesPerSecond':1.0/h['timescale']})
	doc = nmpml.blankDocument()
	doc.newElement(dat)
	io.write(doc, nfn)	
Example #7
def sum_stack(fn,sw):
	'''Measures relative illumination between frames. Sums the values of each frame, divides by the number of pixels, and removes the mean.'''
	dat, fs = retrieveData(fn[0],sw)
	nframes = dat.shape[3]
	print('processing image stack: %ix%i, %i frames ...' % (dat.shape[0], dat.shape[1], nframes))
	s = zeros(nframes)
	for k in arange(nframes):
		s[k]=sum(dat[:,:,0,k])
	numpix = float32(dat.shape[0]*dat.shape[1])
	s = s/numpix
	s -= s.mean()
	if len(s.shape)==2:
		s=s.reshape(-1,)
	h = nmpdat.newHeader(fs=fs)
	nd = nmpdat.newData(s, h)
	doc = nmp.blankDocument()
	doc.newElement(nd)
	if 'dir' in sw:
		a = io.write(doc,sw['dir'] + '/' + sw['sum_file'])
	else:	
		a = io.write(doc,sw['sum_file'])
	if a:
		print "%s successfully written." % sw['sum_file']
	else:	
		print "%s failed to write." % sw['sum_file']
	
	return s, fs
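A minimal usage sketch for sum_stack follows; the file name and the switch keys ('sum_file', 'dir') are illustrative assumptions based on how the function reads its arguments, not values taken from the project.
# hypothetical call: fn is a list whose first entry is the image stack,
# sw is the switch dictionary that sum_stack consults for output naming
fn = ['led_stack.tif']                    # assumed stack readable by retrieveData
sw = {'sum_file': 'frame_sums.mdat',      # output written by io.write inside sum_stack
      'dir': 'results'}                   # optional output directory
s, fs = sum_stack(fn, sw)
print("%i frame sums at %g frames per second" % (s.shape[0], fs))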
Example #8
def knit(dname):
	if os.path.isfile(os.path.join(dname, 'concat_ex.mdat')):
		os.unlink(os.path.join(dname, 'concat_ex.mdat'))
	if os.path.isfile(os.path.join(dname, 'concat.mdat')):
		os.unlink(os.path.join(dname, 'concat.mdat'))		
	print("=== processing directory %s ===" % dname)
	dat_all = []
	dat_ex = []
	mf_allch = []
	mdat = [f for f in os.listdir(dname) if f.endswith("_ts.mdat")]
	date = mdat[0][:10]
	if all([f[:10] == date for f in mdat]):
		if os.path.isfile(os.path.join(dname, date+'concat_ex.mdat')):
			os.unlink(os.path.join(dname, date+'concat_ex.mdat'))
		if os.path.isfile(os.path.join(dname, date+'concat.mdat')):
			os.unlink(os.path.join(dname, date+'concat.mdat'))		
	else:
		print "Multiple experiments present -- aborting. Put separate experiments in different folders."
		return None	
	bin = [f for f in os.listdir(dname) if f.endswith(".bin")]
	for f in mdat:
		ff1=os.path.join(dname, f)
		f2 = getMatch(f[:-8], bin)
		if not f2:
			print("can't match %s" % (f,))
			continue
		ff2 = os.path.join(dname, f2)
		print("adding file pair %s, %s" % (f, f2))
		dat1 = io.read(ff1).getElements("Data")[0]
		dat2 = io.read(ff2).getElements("Data")[0]
		dat2 = cropMicroflown(dat2)
		# crpd = dat2.getData()
		# ds = miendata.newData(crpd, {'SampleType':'timeseries', 'SamplesPerSecond':10000})
		# doc = nmpml.blankDocument()
		# doc.newElement(ds)
		# io.write(doc, os.path.join(dname, 'crpd.mdat'))
		resample(dat1, 1000)
		resample(dat2, 1000)
		dat1 = dat1.getData()
		dat2 = dat2.getData()[:,2]
		if dat1.shape[0] < dat2.shape[0]:
			dat2 = dat2[:dat1.shape[0]]
		elif dat1.shape[0] > dat2.shape[0]:
			dat1 = dat1[:dat2.shape[0]]
		dat1 -=  dat1.mean()
		dat2 -= dat2.mean()
		dd = column_stack([dat1, dat2])
		dat_all.append(dd)
		if not any([q in f.lower() for q in EXCLUDE]):
			dat_ex.append(dd)
	dat = row_stack(dat_all)
	ds = miendata.newData(dat, {'SampleType':'timeseries', 'SamplesPerSecond':1000})
	doc = nmpml.blankDocument()
	doc.newElement(ds)
	io.write(doc, os.path.join(dname, date+'concat.mdat'))
	if len(dat_ex) < len(dat_all):
		dat = row_stack(dat_ex)
		ds.datinit(dat)
		io.write(doc, os.path.join(dname, date+'concat_ex.mdat'))
Example #9
	def findDataElement(self):
		de = self.getElements("Data")
		if de:
			self.data = de[0]
		else:
			from mien.nmpml.data import newData
			self.data = newData(zeros((0,2)),{"SampleType":"labeledevents", "SamplesPerSecond":20000})
			self.newElement(self.data)
Example #10
 def setCellData(self, a, fs):
     d = self.getElements("Data")
     if d:
         d = d[0]
         d.datinit(a, {"SampleType": "timeseries", "SamplesPerSecond": fs})
     else:
         d = newData(a, {"Name": "activation_data", "SampleType": "timeseries", "SamplesPerSecond": fs})
         self.newElement(d)
Example #11
def maskPoints(am, points):
	pts = (points[:,0:3]+points[:,4:7]) / 2
	rads = (points[:,3]+points[:,7]) / 4
	ds = newData(pts, {"Name":'points', "SampleType":"generic"})
	am.run(ds)
	vals = ds.getSubData('gmmout').getData()
	vals = vals * rads[:,newaxis]
	return vals
Example #12
	def plot(self, a, h):	
		if not self.dv.data or self.dv.data.__tag__!="Data":
			dat=newData(None, {'SampleType':'group'})
			self.dv.document.newElement(dat)
			self.dv.report('Auto-generating Data element')
			self.dv.data=dat
			self.dv.onNewData()
		self.dv.data.datinit(a, h)
		self.dv.update_all(object=self.dv.data)
Example #13
def writeFile(f, sps, fname='CubicFunctions', start=0):
    ndoc = nmpml.blankDocument()
    tf = mdat.newData(
        f, {
            'Name': fname,
            'SampleType': 'timeseries',
            'SamplesPerSecond': sps,
            "StartTime": start
        })
    ndoc.newElement(tf)
    io.write(ndoc, fname + ".mdat")
Example #14
	def getData(self):
		de = self.getElements("Data")
		if de:
			return de[0]
		else:
			print "Can't find a data element. Making an empty one"
			from mien.nmpml.data import newData
			attrs = {"Type":"sfield", "Edge":self.attrib("Edge"), "Origin":self.attrib("Origin")}
			a=array([[[]]])
			d = newData(a, attrs)
			self.newElement(d)
			return d
Example #15
def saveView(cv):
	vg=viewGroup(cv)
	pv = vg.getElements('Data', {'SampleType':'CVViewpoint'}, depth=1)
	vpn = "View%03d" % len(pv)
	d=cv.askParam([{'Name':'Name This View', 'Value':vpn}])
	if not d:
		return
	n=d[0]
	nd = newData(None, {'SampleType':'CVViewpoint'})
	vg.newElement(nd)
	nd.setName(n)
	view=getCurrentVP(cv)
	nd.datinit(view)
	cv.report('view added')
Example #16
File: table.py Project: gic888/MIEN
	def findDataElement(self):
		datr = ["Type", "Labels", "Columns", "Url", "DataType", "SamplesPerSecond"]
		attrs = {"Type":"Auto"}
		for k in datr:
			if k in self.attributes:
				attrs[k]=self.attrib(k)
				del self.attributes[k]
		de = self.getElements("Data")
		if de:
			self.data = de[0]
		else:
			print "Can't find a data element. Making an empty one"
			from mien.nmpml.data import newData
			self.data = newData(zeros((0,0), float32),  {'SampleType':'Locus'})
			self.newElement(self.data)
Example #17
def pts2data(arg):
    """pts2data  fname [fname2 [...]] - extract points from sphere fiducials to a text data file

This function will load the contents of all the named files (provided they are readable by MIEN), 
select all the Fiducial elements that have Style == spheres, extract the point data from these
elements, concatenate the data into a single data element, and print that data to standard output
in text data file format. You can use shell redirection (e.g. "pts2data fname > data.txt") to 
write the points to a file.
"""
    doc = io.readall(arg)
    fid = doc.getElements("Fiducial", {"Style": "spheres"})
    pts = [f.getPoints() for f in fid]
    pts = row_stack(pts)
    d = md.newData(pts, {"SampleType": "generic"})
    f = io.match_extension("txt")[0]
    io.write(d, sys.stdout, format=f, newdoc=True)
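A short usage sketch for pts2data, restating the docstring's shell example and adding a direct call; the file names are placeholders and the list argument is an assumption about how the command-line wrapper passes its arguments.
# hypothetical invocations; file names are placeholders
#   shell:  pts2data cell1.nmpml cell2.nmpml > sphere_points.txt
pts2data(['cell1.nmpml', 'cell2.nmpml'])  # prints the concatenated sphere points to stdout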
Example #18
def spikeDistanceTest(st, rt):
	m, id1, id2=getAllDist(st, rt)
	print(m)
	d=makeEquiv(m, id1, id2)
	print(d)
	de=newData(zeros(0), {'SampleType':'group'})
	for i in range(d.shape[0]):
		rst=rt.getData()
		print(rst.shape)
		rst=take(rst[:,0], nonzero(rst[:,1]==d[i,1])[0], 0)
		est=st.getData()
		est=take(est[:,0], nonzero(est[:,1]==d[i,0])[0], 0)
		cst=zeros((rst.shape[0]+est.shape[0],2))
		cst[:rst.shape[0],0]=rst
		cst[rst.shape[0]:,0]=est
		cst[rst.shape[0]:,1]=1
		cde=de.createSubData("/unit%i" % (int(d[i,1]),), cst, {'SampleType':'labeledevents', 'SamplesPerSecond':rt.fs()})
	io.write(de, "check_results.mdat", format=".mdat", newdoc=True)
def allTFsRs(fnames):
	ndoc = nmpml.blankDocument()
	for fname in fnames:
		doc = io.read(fname)
		ds = doc.getElements("Data", depth=1)[0]
		bn=os.path.splitext(fname)[0]
		tf = tffmax(ds, False)
		if not tf:
			continue
		tf = tf.getData()
		if fname.startswith("2009") or fname.startswith("2010"):
			tf[:,2]+=pi
			tf[:,2] = tf[:,2] -  ( 2*pi*(tf[:,2]>pi))
		tf = row_stack( [array([[0,0,0]]), tf, array([[250,tf[-1,1],tf[-1,2]]])])
		tf = uniformsample(tf, 1.0)
		tf = mdat.newData(tf, {'Name':bn, 'SampleType':'timeseries', 'SamplesPerSecond':1.0, "StartTime":0})
		ndoc.newElement(tf)
	io.write(ndoc, "allTFsResamp.mdat")		
Example #20
	def _getBiasData(self, rerun=False):
		bd=self.document.getElements('Data', 'BiasField', depth=1)
		if bd and not rerun:
			return bd[0]
		if bd:
			bd=bd[0]
			bd.datinit(None, {'SampleType':'group'})
		else:
			bd=newData(None, {'Name':'BiasField', 'SampleType':'group'})
			self.document.newElement(bd)
		els=[0,1]
		if self.biasmode=='conditioned':
			els.append(2)
		try:
			bd=self.alg.run(bd, els)
		except Exception:
			# fall back to running the algorithm on the first element alone
			bd=self.alg.run(bd, [0])
		return bd
Example #21
def saveWaves(spi, bn, t, dat):
    fn = bn + "_%s.mdat" % (t.name(),)
    tem = t.getElements("Data", "template", depth=1)[0]
    lead = tem.attrib("Lead")
    length = tem.getData().shape[0]

    # ignore spikes too close to the ends of the data file
    spi = spi[where(spi > lead)]
    spi = spi[where(spi < (dat.shape()[0] - (length - lead)))]
    if spi.shape[0] < 1:
        return

    waves = takewindows(dat.getData(), spi, lead, length)
    head = {"SampleType": "ensemble", "Lead": lead, "Reps": spi.shape[0], "SamplesPerSecond": dat.fs()}
    waves = newData(waves, head)
    h = waves.createSubData("/hidden")
    h.newElement(t.clone(False))
    h.newElement(tem.clone(False))
    io.write(waves, fn, newdoc=True, format=".mdat")
Example #22
	def makeUniformMask(self, event):
		d=self.askParam([{"Name":"Density",
							  "Value":.1},
							 {"Name":"Name",
							  "Value":"AfferentClassXX"},
							 {"Name":"Which Sections",
							  "Type":"List",
							  "Value":["All", "Only selected sections",
									   "Only non-selected sections"]}])
		if not d:
			return
		points = self.cell.get_drawing_coords()
		nc=reshape(points, (-1, 8)).shape[0]
		maskdat=ones(nc, float32)*d[0]
		if d[2]!="All":
			m=self.sectionMask(("non" in d[2]))
			if m is None:
				return
			maskdat=maskdat*m
		d = newData(reshape(maskdat, (-1,1)),  {'Name':d[1],'SampleType':'mask'})
		self.cell.newElement(d)
		self.gui.update_all(object=d, event="Create")
Example #23
	def makeMasks(self, event):
		c = self.cell
		points = reshape(c.get_drawing_coords(), (-1, 8))
		diams = (points[:,3]+points[:,7])/2
		of = open('modeltemplate.txt', 'w')
		for p in points:
			of.write("%.3f %.3f %.3f %.3f %.3f %.3f %.3f %.3f\n" % tuple(p))
		of.close()
		check=os.system("matlab -nojvm -nosplash -r \"maskalldata('modeltemplate.txt'); quit\"")
		if check:
			self.report("system call to matlab maskalldata failed")
			return
		
		lines=open("masked_modeltemplate.txt").readlines()
		tags = array([[float(x) for x in l.split()] for l in lines])
		filt = diams < 8.5
		tags = tags*filt[:,newaxis]
		for i in range(tags.shape[1]):
			n = "AfferentClass%02d" % (i+1,)
			d = newData(tags[:,i:i+1],  {'Name':n,'SampleType':'mask'})
			c.newElement(d)
		self.gui.update_all(object=c, event="Rebuild")
Example #24
def newFile(s,d=()):
	d=dict(d)
	nd = nmpdat.newData(s, d)
	doc = nmp.blankDocument()
	doc.newElement(nd)
	return doc
Example #25
def comp_series(fn,sw):
	'''Compares illumination of movie images to stimulus generator record running the LED.'''
	if 'n' in sw.keys():
		s, fs = retrieveData(fn[0],sw)
	else:
		s, fs = sum_stack(fn,sw)
	if len(s.shape)==2 and s.shape[1] == 1:
		s=s.reshape(-1,)
	b, fsb = retrieveData(fn[1],sw)
	if 'e' in sw.keys():
		es, esind = near_event_finder(s,fs,stdnum=5,numback=6)
		eb, ebind = near_event_finder(b[:,0],fsb,thresh=0.1)
	elif 'f' in sw.keys():
		es, esind = subfs_event_finder(s,fs)
		eb, ebind = subfs_event_finder(b[:,0],fsb,5)		
	else:
		es, esind = event_finder(s,fs)
		eb, ebind = event_finder(b[:,0],fsb,thresh=1)
	df = len(eb) - len(es)
	if df:
		z=zeros(df,s.dtype)
		es=hstack([es,z])
	tdiff = es-eb
	#Start: code to ensure all events are detected
	bb = b[:,0]
	z = zeros_like(bb)
	z[ebind] = 1
	btot = vstack([bb,z]).T
	hb = nmpdat.newHeader(fs=fsb, l=['RawStimulus','Events'])
	ndb = nmpdat.newData(btot, hb)
	docb = nmp.blankDocument()
	docb.newElement(ndb)
	if 'dir' in sw:
		a = io.write(docb,sw['dir'] + '/' + sw['stim_file'])
	else:	
		a = io.write(docb,sw['stim_file'])
	if a:
		print "%s successfully written." % sw['stim_file']
	else:	
		print "%s failed to write."  % sw['stim_file']
	zs = zeros_like(s)
	zs[esind] = 1
	stot = vstack([s,zs]).T
	hs = nmpdat.newHeader(fs=fs, l=['CameraSum','Events'])
	nds = nmpdat.newData(stot, hs)
	docs = nmp.blankDocument()
	docs.newElement(nds)
	if 'dir' in sw:
		a = io.write(docs,sw['dir'] + '/' + sw['cam_file'])
	else:	
		a = io.write(docs,sw['cam_file'])
	if a:
		print "%s successfully written." % sw['cam_file']
	else:	
		print "%s failed to write." % sw['cam_file']
	#End: code to ensure all events are detected
	tot = vstack([es,eb,tdiff]).T
	h = nmpdat.newHeader(fs=1.0, l=['VideoEvents','StimulusEvents','TimeDifference'])
	nd = nmpdat.newData(tot, h)
	doc = nmp.blankDocument()
	doc.newElement(nd)
	if 'dir' in sw:
		a = io.write(doc,sw['dir'] + '/' + sw['comp_file'])
	else:	
		a = io.write(doc,sw['comp_file'])
	if a:
		print "%s successfully written." % sw['comp_file']
	else:	
		print "%s failed to write." % sw['comp_file']
Example #26
if options['a'] is not None:  # assumed guard; this snippet begins mid-conditional in the source
	ang = options['a']
	crap, index = imrotate(dat, ang)
else:
	ang, index = guessAngle(dat, options['t'])
print "guessing angle to be %i" % ang
print "processing %i frames ..." % nframes
out = []
#nframes = min(600, nframes)
for i in range(nframes):
	frame = readFrameN(f, h, i) 
	if options['m']:
		frame = frame - mean
	frame, junk = imrotate(frame, ang, index)
	x = frame.sum(1)
	if options['w']>1:
		x=convolve(ones(options['w']), x, mode='same')
	out.append(argmin(x))
	if not i%500:
		print(i)
out = array(out, float32)
if L:	
	L=L*AOS_PixPerDiv/NIKON_PixPerDiv	
	out-=out.mean()
	out = out/L

dat = mdat.newData(out, {'SampleType':'timeseries', 'SamplesPerSecond':1.0/h['timescale']})
doc = nmpml.blankDocument()
doc.newElement(dat)
io.write(doc, fnameout)