def _getthresh(fd, pos):
    """Build a dict mapping sample position to the threshold set in that second.

    fd  -- open file descriptor for a summary file
    pos -- base position handed through to sumfile.get_sum_file_fd()

    For every second that contains at least one SUM_THRESH event with a
    truthy threshold, the LAST such event's eThresh is stored under the
    key ``second * 256`` (presumably 256 samples/second -- confirm against
    the callers that consume these keys).

    NOTE(review): another _getthresh(pathname, fobj=None) defined later in
    this module shadows this definition at import time -- confirm which
    one callers actually get.
    """
    fobj = sumfile.get_sum_file_fd(fd, baseposition=pos)
    evd = sumcalc.make_all_events(fobj)
    td = {}
    # sorted() instead of keys()/sort(): same order, but also valid on
    # Python 3 where dict.keys() returns a view without a sort() method.
    for t in sorted(evd.keys()):
        thr = None
        for ev in evd[t]:
            if ev.type == sumfile.SUM_THRESH:
                thr = ev.eThresh  # keep the LAST threshold event in this second
        if thr:
            td[t * 256] = thr
    return td
def _getthresh(pathname, fobj=None):
    """Return {second: threshold} covering seconds 0..fobj.number_seconds.

    pathname -- path whose 4-char extension is replaced by '.sum' to find
                the summary file; ignored when fobj is supplied
    fobj     -- already-opened summary file object (optional)

    The first new SUM_THRESH event in each second defines a change point.
    Seconds up to and including a change point are filled with that (new)
    value; seconds after the last change point carry the last value to the
    end of the session.

    Returns None when the summary file cannot be read.

    NOTE(review): this definition shadows the earlier
    _getthresh(fd, pos) in this module.
    """
    if not fobj:
        try:
            fobj = sumfile.get_sum_file(pathname[:-4] + '.sum')
        except Exception:  # narrowed from bare except:
            return None
    evd = sumcalc.make_all_events(fobj)
    # collect the first new-event threshold seen in each second
    td = {}
    for t in sorted(evd.keys()):
        thr = None
        for ev in evd[t]:
            if ev.type == sumfile.SUM_THRESH and ev.newevent:
                thr = ev.eThresh
                break  # only the first new threshold event counts
        if thr:
            td[t] = thr
    # expand the change points into a per-second dict
    thrdict = {}
    pos = 0
    thr = None
    for k in sorted(td.keys()):
        thr = td[k]
        # all seconds up to and including the change point get the new value
        while pos <= k:
            thrdict[pos] = thr
            pos += 1
    if thr is None:
        # no threshold events at all -- the original raised NameError here;
        # return the (empty) dict instead
        return thrdict
    # carry the last threshold through to the end of the session
    while pos <= fobj.number_seconds:
        thrdict[pos] = thr
        pos += 1
    return thrdict
def csv_dump_out(fo, fobj, rpt=0):
    """Dump one summary file as a list of CSV rows (one row per second).

    fo   -- file-like object to write the CSV text to, or None to only
            build and return the row list
    fobj -- parsed summary-file object (from sumfile)
    rpt  -- when true, print MARK events while processing (debug aid)

    Returns the list of row strings; [] when the file has no seconds of
    data.  When fo is given the rows are also written to it, newline
    separated, with a trailing newline.
    """
    rslt = []
    if len(fobj.zscore):
        has_zscore = 1
    else:
        has_zscore = 0
    # dump header for csv file
    if fo == None:
        rslt = []  # NOTE(review): redundant reset -- rslt is already empty
    row = ''
    for t in headers:
        row = row + '"%s",' % t
    for t in headn:
        # one column per stream for each numbered header name
        for i in range(fobj.number_streams):
            v = '"%s-%d",' % (t, i)
            row = row + v
        if fobj.has_total_in_percent:
            if t == '%over':
                row = row + '"%total",'  # there is one extra value - total %
        if t == 'Ampl':
            row = row + '"Scored",'
    for i in range(fobj.number_streams):
        for t in headf:
            v = '"%s-%d",' % (t, i)
            row = row + v
    if len(fobj.periph):
        row = row + '"Periph",'
        has_periph = fobj.periph[0]  # peripheral data channel object
    else:
        has_periph = None
    for i in range(fobj.number_lowpass):
        # raw-channel columns are lettered A, B, ...; last gets no comma
        if i == fobj.number_lowpass - 1:
            last = ' '
        else:
            last = ','
        v = '"%s-%c"%c' % (headc[0], chr(ord('A') + i), last)
        row = row + v
    if has_zscore:
        # zscore_kind != 0: 3 title groups of 8 columns;
        # kind 0: 7 groups of 8 plus two extra groups of 10
        endpt = 7
        if fobj.zscore_kind:
            endpt = 3
        for i in range(endpt):
            for f in range(8):
                row = row + ',"%s %s"' % (ztitle[i], zfreq[f])
        if fobj.zscore_kind == 0:
            for i in range(7, 9):
                for f in range(10):
                    row = row + ',"%s %d"' % (ztitle[i], f)
    for i in range(fobj.number_streams):
        row = row + ',EndAmpl-%d' % i
    max_seconds = fobj.number_seconds
    if max_seconds == 0:
        return []  # empty file: nothing to dump
    rslt.append(row)
    # get timed event list
    evdict = sumcalc.make_all_events(fobj)
    # for every second, dump a row
    numlow = fobj.number_lowpass
    numtot = fobj.number_streams
    prevperiod = 0
    # running "current state" values, updated as events arrive
    eMode = ''
    eRDirs = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    eFreq = [(0.0, 0.0), (0.0, 0.0), (0.0, 0.0), (0.0, 0.0), (0.0, 0.0),
             (0.0, 0.0), (0.0, 0.0), (0.0, 0.0), (0.0, 0.0), (0.0, 0.0),
             (0.0, 0.0), (0.0, 0.0), (0.0, 0.0), (0.0, 0.0), (0.0, 0.0),
             (0.0, 0.0), (0.0, 0.0), (0.0, 0.0)]
    eSites = ['', '', '', '']
    eThresh = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    eScale = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    ePeriod = 0
    eMarkdata = ''
    eMarklast = ''
    eEyes = ''
    pgame = ''
    eoec = update_eyes(fobj)  # {second: eyes state} transition points
    eyes = 0
    for sec in range(0, max_seconds):
        if eoec.has_key(sec):
            eyes = eoec[sec]
        if not evdict.has_key(sec):
            continue  # only seconds that have events produce a row
        row = ''
        mxdt = mx.DateTime.DateTimeFromAbsDateTime(fobj.datecode,
                                                   float(fobj.timecode + sec))
        if 0:  # dead debug branch (never taken): per-event state tracking
            for q in range(len(evdict[sec])):
                e = evdict[sec][q]
                ePeriod = evdict[sec][0].ePeriod
                if e.type == sumfile.SUM_FREQ:
                    eFreq = e.eFreq
                elif e.type == sumfile.SUM_SITE:
                    eSites = e.eSites
                elif e.type == sumfile.SUM_PERIOD:
                    ePeriod = e.ePeriod
                elif e.type == sumfile.SUM_MARK:
                    if rpt:
                        print 'Mark', e.eMarks, eMarkdata
                    if eMarkdata == '':
                        eMarkdata = e.eMarks
                    else:
                        eMarkdata = eMarkdata + '/' + e.eMarks
                elif e.type == sumfile.SUM_MODE:
                    eMode = e.eMode[0]
                    e.RDirs = e.eMode[1]
                    if e.RDirs[15] == 1:
                        eEyes = 'Open'
                    elif e.RDirs[15] == 2:
                        eEyes = 'Closed'
                elif e.type == sumfile.SUM_THRESH:
                    eThresh = e.eThresh
                elif e.type == sumfile.SUM_SCALE:
                    eScale = e.eScale
                    break
        else:
            # live path: take the first (merged) event of the second and
            # update only the state fields it actually carries
            e = evdict[sec][0]
            if e.Freq:
                eFreq = e.Freq
            if e.Sites:
                eSites = e.Sites
            ePeriod = e.Period
            if e.Mode:
                eMode = e.Mode[0]
                eRDirs = e.Mode[1]
            else:
                pass
            if e.Thresh:
                eThresh = e.Thresh
            if e.Scale:
                eScale = e.Scale
            if e.Marks:
                if eMarkdata == '':
                    eMarkdata = e.Marks[:]
                else:
                    eMarkdata = eMarkdata + '/' + e.Marks[:]
        Overpct = 0
        if ePeriod == 0 and prevperiod:
            # a period just ended: flag it and record the overall reward
            endperiod = prevperiod
            Overpct = e.overall_reward
        else:
            endperiod = 0
        prevperiod = ePeriod
        #deltatime,#raw,#total,period#,score,periodendflag,fbmode
        if eMode == None:
            eMode = ''
        try:
            over = fobj.pct[0].data[sec]
        except:
            over = [0]  # missing pct data for this second
        deltascore = over[0] % 100  # get the fractional part which is the delta score
        row = row + '%d,%d,%d,%d,%d,%d,%d,"%s","%s"' % (
            sec, numlow, numtot, prevperiod, deltascore, endperiod, Overpct,
            eMode, ('', 'EOpen', 'EClosed')[eyes])
        # only emit mark text when it changed since the previous row
        if eMarklast == eMarkdata:
            eMarkdata = ''
        else:
            eMarklast = eMarkdata
        #,marktype,markdata,fmtstring,date,time,
        if fobj.gameid != pgame:
            pgame = fobj.gameid
            gm = pgame  # game id is emitted only when it changes
        else:
            gm = ''
        row = row + ',"%s","%s","%s","%s","%s",' % (
            gm, eMarkdata, fobj.formatstring, fobj.datestring,
            mx.DateTime.ISO.str(mxdt)[11:19])
        eMarkdata = ''
        hadscore = 0
        #ampl0,ampl1,..., (# of streams total)
        try:
            amp = fobj.seg[0].data[sec]
        except:
            amp = [0 for i in range(numtot)]
        for i in range(numtot):
            # values are stored as hundredths; a negative value marks scoring
            row = row + '%6.2f,' % (abs(int(amp[i])) / 100.0)
            if amp[i] < 0:
                hadscore = 1  # remember that scoring occurred
        row = row + '%d,' % hadscore
        #%over0,%over1,..., (# of streams total)
        mynum = numtot
        if fobj.has_total_in_percent:
            mynum += 1
        for i in range(mynum):
            if i >= len(over):
                v = 0
            else:
                v = over[i]
            row = row + '%d,' % (v / 100)
        #thresh0,thresh1,..., (# of streams total)
        thr = eThresh
        for i in range(numtot):
            try:
                tval = float(abs(thr[i]))
            except:
                tval = 0.0
            row = row + '%6.2f,' % (tval)
        #average values
        try:
            movavg = fobj.avg[0].data[sec]
        except:
            movavg = [0 for i in range(numtot)]
        for i in range(numtot):
            row = row + '%6.2f,' % (abs(int(movavg[i])) / 100.0)
        #rwdirs0,rsdirs1,..., (# of streams total)
        rwd = eRDirs
        if rwd == None:
            rwd = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        for i in range(numtot):
            row = row + '%d,' % (rwd[i], )
        #frqlo0,frqhi0,...... (# of streams total times 2)
        frql = eFreq
        for i in range(numtot):
            try:
                fv = frql[i]
            except:
                fv = (0.0, 0.0)
            row = row + '%6.3f,%6.3f,' % fv
        if has_periph:
            try:
                pval = has_periph.data[sec][0]
                pval = float(pval)
            except:
                pval = -10.0  # sentinel when peripheral data is missing
            row = row + '%6.3f,' % (pval / 10, )
        #siteA,siteB,..., (# of raw streams)
        sl = eSites
        for i in range(numlow):
            if i == numlow - 1:
                last = ' '
            else:
                last = ','
            if i >= len(sl):
                site = ''
            else:
                site = sl[i]
            row = row + '"%s"%c' % (site, last)
        if has_zscore:
            if sec < len(fobj.zscore[0].data):
                dta = fobj.zscore[0].data[sec]
                endpt = 7
                if fobj.zscore_kind:
                    endpt = 3
                for i in range(endpt):
                    for f in range(8):
                        row = row + ',%.2f' % (int(dta[8 * i + f]) / 100.0)
                if fobj.zscore_kind == 0:
                    for i in range(7, 9):
                        for f in range(10):
                            # last group is offset by 2 within its bank
                            if i > 7:
                                adjust = 2
                            else:
                                adjust = 0
                            row = row + ',%.2f' % (
                                int(dta[8 * i + adjust + f]) / 100.0)
        if endperiod:
            # period just ended: emit its stored end amplitudes
            for i in range(fobj.number_streams):
                row = row + ',%.2f' % fobj.per[endperiod - 1].ampl[i]
        else:
            for i in range(fobj.number_streams):
                row = row + ','
        rslt.append(row)
    if fo:
        fo.write('\n'.join(rslt))
        fo.write('\n')  # trailing newline
    return rslt
def refilter(root, fn, edict=None, recompute=0):
    """Re-run the filters over a raw data file and compute per-period,
    per-channel average band values.

    root      -- UI parent for the BusyBox progress window, or None
    fn        -- raw data file path; the matching '.sum' file is derived
                 by replacing the 4-char extension
    edict     -- settings dict passed to get_freqset() and the filter loader
    recompute -- when true average the whole period; otherwise only the
                 last 30 seconds of each period

    Returns None when the raw file cannot be opened, else a dict keyed
    '%2d%02d' % (period, channel) of Value objects, plus a '%2dxx' entry
    holding the channel count for periods with more than one channel.
    """
    if root:
        bob = BusyBox.BusyBox(root, 'Busy processing raw data')
    ans = rawlist.rawinfo(fn)
    if ans[0] == None:
        if root:
            bob.destroy()  # oops
        return None  # couldn't open
    # must also insert the site and eyes open/close changes
    cbxx = rawlist.rawcb(fn)
    cbl = cbxx[0]
    true_length = ans[0]
    if len(cbl):
        ans = _handle_other_events(ans, cbl)
    # now figure out the periods
    plist = _decode_evlist(ans)
    fobj = sumfile.get_sum_file(fn[:-4] + '.sum')
    evd = sumcalc.make_all_events(fobj)
    get_freqset(edict)
    dofilts.Load_Filter_File(edict)
    dofilts.Load_Raw_File(fn)
    d = {}
    per = 1
    for itm in plist:
        # get the starting sample, and length
        end = itm[1]
        if end == 0:
            end = true_length  # open-ended period runs to end of file
        begin = itm[0]
        if recompute:
            start = begin
        else:
            # 30*256 samples back from the end -- presumably 256
            # samples/second (see the /256.0 below); TODO confirm
            start = end - 30 * 256
            if start < begin:
                start = begin
        if start < 0:
            start = 0
        length = (end - start) / 256.0  # seconds
        for chan in range(len(itm[2])):
            # compute them
            clamp = _getclamp(evd, chan, start, end)
            move = dofilts.Get_Average_Value(chan, freqlims, start, length,
                                             trueaverage=1, clamp=clamp,
                                             useartifact=avalue)
            if len(move) == 0:
                continue  # no data for this channel in this span
            v = Value()
            v.theta = move[0][0]
            v.alpha = move[0][1]
            v.beta = move[0][2]
            v.hibeta = move[0][3]
            v.values = move[0][:]  # for newer method
            v.period = per
            v.site = itm[2]
            v.eyes = itm[3]
            v.rewards = itm[-1]
            v.freqlims = freqlims
            # following for debug only
            v.tx = move[2]
            v.secs = length
            v.sums = move[3]
            v.count = move[4]
            d['%2d%02d' % (per, chan)] = v
        if len(itm[2]) > 1:
            # record how many channels this period really had
            d['%2dxx' % (per, )] = len(itm[2])
        per += 1
    if root:
        bob.destroy()
    dofilts.Unload_Filter_File()
    return d
def get_annotation_list(summary):
    '''Return a list of [time, text, typename] annotations built from the
    events in a summary file.

    summary -- parsed summary-file object exposing .events

    Only event type names listed in TRIGGERS produce annotations; events
    at or past MAX_CHAN_LENGTH are dropped.  MODE events may emit an extra
    [time, 'Open'/'Closed', 'Eyes'] entry.

    NOTE(review): returns 0 (not a list) when a TRIGGERS name reaches the
    final else branch -- callers must tolerate that inconsistency.
    '''
    global MAX_CHAN_LENGTH
    annotations = []  # list of annotations
    eventlist = sumcalc.make_all_events(
        summary)  # dict of all events keyed by time in seconds
    time = -1
    position = 0
    period = 1
    for event in summary.events:
        ### MAX_CHAN_LENGTH must be set to the true data length before calling
        if event.start >= MAX_CHAN_LENGTH:  # If reach end of data, cut off annotations
            break
        if time == event.start:  # Get the correct position for the DataSegment in eventlist.
            position += 1  # We need this because there can be multiple DataSegment (events)
        else:  # for any specific time.
            time = event.start
            position = 0
        #
        # We only care for events of type:
        # FREQ, SITE, MODE, THRESH, MARK, BEGIN, and END
        if event.typename in TRIGGERS:
            type = event.typename
            time = event.start
            temp_data = ''
            temp_data2 = ''
            if type == 'SITE ':
                # comma-join the site names, dropping the trailing comma
                for site in event.sites:
                    temp_data += site + ','
                temp_data = temp_data[:-1]
            elif type == 'MODE ':
                try:
                    temp_data = 'MODE ' + eventlist[time][position].Mode[0]
                except:  # For old version 4.0.3h and before
                    temp_data = 'MODE ' + eventlist[time][position - 1].Mode[0]
                try:
                    # Mode[1][15] encodes the eyes state: 1=open, 2=closed
                    v = eventlist[time][position].Mode[1][15]
                    if v == 1:
                        temp_data2 = 'Open'
                    elif v == 2:
                        temp_data2 = 'Closed'
                except:
                    pass
            elif type == 'FREQ ':
                temp_data = 'FREQ ' + combine_freq(
                    eventlist[time][position].Freq)
            elif type == 'THRESH ':
                temp_data = 'THRESH ' + combine_thresh(
                    eventlist[time][position].Thresh)
            elif type == 'BEGIN':
                temp_data = 'BEGIN ' + str(period)
            elif type == 'MARK ':
                # impedance-check-off marks are deliberately suppressed
                if event.markdata == '0 Impedance check OFF':
                    continue
                else:
                    temp_data = 'MARK ' + event.markdata
            elif type == 'END':
                temp_data = 'END ' + str(period)
                period += 1
            else:
                return 0
            annotations.append([time, temp_data, type])
            if temp_data2 != '':
                # extra annotation for the eyes open/closed transition
                annotations.append([time, temp_data2, 'Eyes'])
                temp_data2 = ''
    return annotations
def ExportResult(root, edict, sumpath, header=0): "export a single results csv file" # read file using sumfile fobj = sumfile.get_sum_file(sumpath, 1) if not fobj: return # some error reading summary file if header == 0: if os.environ.has_key('WANTRESULTSHEADER'): header = 2 # determine first two protocol groups evd = sumcalc.make_all_events(fobj) if 0: for pernum in range(len(fobj.per)): per = fobj.per[pernum] print pernum, per.start, per.end site = '' rmode = '' rdir = '' # this works by finding the 'time' of all changes in site, rmode, rdir # after all have been set (ignore first 10 seconds....), a change means a new "protocol" changelist = [] dt = 1 chkprint = 0 prevperiod = 0 change = 0 while 1: if not evd.has_key(dt): break evl = evd[dt] for ev in evl: ##print ev.__dict__ if ev.newevent: if ev.Sites and ev.Sites != site: ##print 'site change',site,ev.Sites site = ev.Sites change = 1 if ev.Mode and (ev.Mode[0] != rmode or ev.Mode[1] != rdir): if ev.Mode[0] != rmode: ##print 'fb change',rmode,ev.Mode[0] rmode = ev.Mode[0] change = change | 2 if ev.Mode[1] != rdir: ##print 'rdir change',rdir,ev.Mode[1] rdir = ev.Mode[1] change = change | 4 if chkprint: print dt, change, ev.newevent, ev.Period, prevperiod if change: if ev.Period and prevperiod != ev.Period: # save accumulated values dome = 1 if len(changelist): if changelist[-1][1] == site and changelist[-1][ 2] == rmode and changelist[-1][3] == rdir: dome = 0 # ignore same things if dome: changelist.append((dt, site, rmode, rdir, ev.Period)) if chkprint: print 'append ', changelist[-1] prevperiod = ev.Period change = 0 ## # now we must remove extraneous changes (duplicate types during rest periods!!!) 
## # to do this we keep only the LAST site/mode/dir during a rest period ## if not ev.Period: ## if len(changelist): ## print changelist ## if changelist[-1][4] < 1: ## if changelist[-1][4] == -1: ## nlist=(dt,site,rmode,rdir,ev.Period) ## else: ## nlist=(changelist[-1][0],site,rmode,rdir,ev.Period) ## if chkprint: ## print 'update ',len(changelist)-1,changelist[-1],nlist ## changelist[-1]=nlist ## elif dt > fobj.per[0].start: ## changelist.append((dt,site,rmode,rdir,ev.Period)) ## if chkprint: ## print 'append ',changelist[-1] ## ## if len(changelist) == 0 and dt > fobj.per[0].start: ## # force in "starting" values ## changelist.append((dt,site,rmode,rdir,-1)) ## if chkprint: ## print 'forced ',changelist[0] dt += 1 if len(changelist) < 1: return 0 # too few entries pgroups = [] ##print "changelist",changelist if len(changelist) > 1: for dta in range(len(changelist)): dt, site, rmode, rdir, pnum = changelist[dta] if dta == len(changelist) - 1: endval = fobj.number_seconds else: endval = changelist[dta + 1][0] - 1 pgroups.append((dt, endval, site, rmode, rdir)) if len(pgroups) < 1: pgroups.append( (10, fobj.number_seconds, site, rmode, rdir)) # one long entry ##print ##print pgroups # determine channels to use clist = [] # if fobj.protocol == AT and fmt string == CCRRII, first R is actually the inhibit isminimap = 1 numlowpass = 0 for i in range(len(fobj.formatstring)): ch = fobj.formatstring[i] if ch == 'C': numlowpass += 1 continue elif ch == 'M': continue elif ch == 'I': isminimap = 0 if len(clist) == 0: clist.append(i) elif len(clist) == 2: clist.append(i) break # found 2nd inhibit elif ch == 'R': isminimap = 0 if fobj.formatstring == 'CCRRII': clist.append(i) else: if len(clist) == 1: clist.append(i) if isminimap: # not a valid session sum file (no inhibits/rewards to get values) for i in range(len(fobj.formatstring)): if fobj.formatstring[i] == 'M': clist.append(i) ngroups = len(pgroups) #print 'minimap',len(clist) else: if len(clist) != 3: return # 
reorganize ngroups = 2 # normal session format hold = clist[1] clist[1] = clist[0] # move 1st inhibit clist[0] = hold # make reward first # recompute sd for each group (handles start/top times by only using in-period values) rvlist = [] abfmt = 1 # assume unknown format for numg in range(ngroups): try: startt, endt, site, rmode, rdir = pgroups[numg] except: #print 'access nonexistant pgroup',numg break prt = 0 if prt: print 'hunting', startt, endt, numg, ngroups, pgroups pernum = sumcalc.compute_end_period(fobj, startt, endt, prt=prt) if pernum == 0: if prt: print 'no pernum found', startt, endt, numg, pgroups[ numg], fobj.fname return # this is an invalid case so DON'T export anything ##break # should never happen continue rawin = 0 per = fobj.per[pernum - 1] if rmode == 'SingleB' or rmode == 'MonitorB': rawin = 1 cmin = 0 if rmode == 'MonitorAB': dobe = 2 cmin = 6 abfmt = 2 else: dobe = 1 for siteinx in range(dobe): rv = ResultValue() rv.name = fobj.ucode rv.date = fobj.datestring if siteinx == 0: try: rv.site = site[rawin] except: if rawin: rv.site = 'Unk B' else: rv.site = 'Unk A' else: try: rv.site = site[1] except: rv.site == 'Unk B' if rdir == '' or rdir == None: rdir = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] rv.mode = rmode + rdircodes[rdir[clist[0]]] if isminimap: if rmode == 'MonitorAB': if siteinx: # must process 1st 6 for site 0 and last 6 for site b offset = 6 else: offset = 0 count = 6 else: offset = 0 count = 12 else: offset = 0 count = len(clist) v = 0 for i in range(offset, offset + count): newfreq = per.freqtext[clist[i]] if newfreq.find('.') >= 0: # only have to fix fractional values fx = newfreq.split('-') if len(fx) == 2: vlow = fx[0].rstrip('0') vhigh = fx[1].rstrip('0') newfreq = vlow + '-' + vhigh rv.freq[v] = newfreq sdx = sumcalc.compute_sd_group(fobj, startt, endt, clist[i]) # sd,count,sumval,startt,endt,variance,aval if sdx[1] > 0: # if valid rv.avg[v] = sdx[6] rv.sd[v] = sdx[0] rv.seconds = sdx[1] # seconds of actual period 
data v += 1 rvlist.append(rv) # output the settings # filename is nameyymmdduu.csv in 'RESULTSPATH' dtval = fobj.datestring[2:4] + fobj.datestring[5:7] + fobj.datestring[ 8:10] + fobj.fname[-6:-4] uniq = int(fobj.fname[-6:-4]) if isminimap: destpath = edict[ 'RESULTSPATH'] + '\\' + fobj.ucode + '_' + dtval + '_mmap%s.csv' % abfmt else: destpath = edict[ 'RESULTSPATH'] + '\\' + fobj.ucode + '_' + dtval + '.csv' fd = open(destpath, "w") if isminimap: if header: fd.write('Name,date,unique,site') for k in range(len(rvlist[0].freq)): fd.write(',freq,avg,SD') fd.write('\n') #name,date,uniq,site,freq lo,freq hi,a,sd,freq lo,freq hi,a,sd .... for rv in rvlist: ln = '%s,%s,%d,%s' % (rv.name, rv.date, uniq, rv.site) fd.write(ln) for k in range(count): lx = ',%s,%5.1f,%4.1f' % (rv.freq[k], rv.avg[k], rv.sd[k]) fd.write(lx) fd.write('\n') else: while len(rvlist) < 2: rvlist.append(ResultValue()) # just so we don't blow up if header: fd.write( '"name",date,unique,A-site,A-Reward freq,A-top inhibit freq,A-2nd inhibit freq,A-seconds,A-reward mode' ) fd.write( ',B-site,B-Reward freq,B-top inhibit freq,B-2nd inhibit freq,B-seconds,B-reward mode' ) fd.write( ',A-reward avg,A-reward sd,A-top inhibit avg,A-top inhibit sd,A-2nd inhibit avg,A-2nd inhibit sd' ) fd.write( ',B-reward avg,B-reward sd,B-top inhibit avg,B-top inhibit sd,B-2nd inhibit avg,B-2nd inhibit sd' ) fd.write('\n') ln = '%s,%s,%d,%s,%s,%s,%s,%4.1f,%s,' % ( rvlist[0].name, rvlist[0].date, uniq, rvlist[0].site, rvlist[0].freq[0], rvlist[0].freq[1], rvlist[0].freq[2], rvlist[0].seconds / 60.0, rvlist[0].mode) fd.write(ln) ln = '%s,%s,%s,%s,%4.1f,%s,' % ( rvlist[1].site, rvlist[1].freq[0], rvlist[1].freq[1], rvlist[1].freq[2], rvlist[1].seconds / 60.0, rvlist[1].mode) fd.write(ln) ln = '%5.1f,%4.1f,%5.1f,%4.1f,%5.1f,%4.1f,' % ( rvlist[0].avg[0], rvlist[0].sd[0], rvlist[0].avg[1], rvlist[0].sd[1], rvlist[0].avg[2], rvlist[0].sd[2]) fd.write(ln) ln = '%5.1f,%4.1f,%5.1f,%4.1f,%5.1f,%4.1f\n' % ( rvlist[1].avg[0], 
rvlist[1].sd[0], rvlist[1].avg[1], rvlist[1].sd[1], rvlist[1].avg[2], rvlist[1].sd[2]) fd.write(ln) fd.close()