def doit(fname,options): fname = sys.argv[1] h5 = tables.openFile(fname,mode='r') stroke_data=h5.root.stroke_data[:] stroke_times = stroke_data['trigger_timestamp'] time_data=h5.root.time_data[:] gain,offset,resids = easy_decode.get_gain_offset_resids( input=time_data['framestamp'], output=time_data['timestamp']) top = h5.root.time_data.attrs.top wordstream = h5.root.ain_wordstream[:] wordstream = wordstream['word'] # extract into normal numpy array r=easy_decode.easy_decode(wordstream,gain,offset,top) if r is not None: chans = r.dtype.fields.keys() chans.sort() chans.remove('timestamps') if 0: Vcc = h5.root.ain_wordstream.attrs.Vcc print 'Vcc read from file at',Vcc else: Vcc=3.3 print 'Vcc',Vcc ADCmax = (2**10)-1 analog_gain = Vcc/ADCmax
def doit(fname, options): fname = sys.argv[1] h5 = tables.openFile(fname, mode='r') time_data = h5.root.time_data[:] gain, offset, resids = easy_decode.get_gain_offset_resids( input=time_data['framestamp'], output=time_data['timestamp']) if options.plot_time_data: fig = pylab.figure() ax = fig.add_subplot(111) ax.plot(time_data['framestamp'], time_data['timestamp'], 'bo', label='data') ax.set_xlabel('framestamp') ax.set_ylabel('timestamp') minx = np.min(time_data['framestamp']) maxx = np.max(time_data['framestamp']) ax.plot([minx, maxx], [minx * gain + offset, maxx * gain + offset], 'k-', label='fit') pylab.legend() top = h5.root.time_data.attrs.top wordstream = h5.root.ain_wordstream[:] wordstream = wordstream['word'] # extract into normal numpy array print 'wordstream.shape', wordstream.shape if wordstream.shape == (0, ): pylab.show() return pylab.figure() # open a new figure r = easy_decode.easy_decode(wordstream, gain, offset, top) chans = r.dtype.fields.keys() chans.sort() chans.remove('timestamps') names = h5.root.ain_wordstream.attrs.channel_names if hasattr(h5.root.ain_wordstream.attrs, 'Vcc'): Vcc = h5.root.ain_wordstream.attrs.Vcc print 'Vcc read from file at', Vcc else: Vcc = 3.3 print 'Vcc guessed at', Vcc ADCmax = (2**10) - 1 analog_gain = Vcc / ADCmax n_adc_samples = len(r['timestamps']) dt = r['timestamps'][1] - r['timestamps'][0] samps_per_sec = 1.0 / dt adc_duration = n_adc_samples * dt print '%d samples at %.1f samples/sec = %.1f seconds' % ( n_adc_samples, samps_per_sec, adc_duration) t0 = r['timestamps'][0] total_duration = adc_duration if options.timestamps: t_offset = 0 t_plot_start = t0 else: t_offset = t0 t_plot_start = 0 ax = None for i in range(len(chans)): ax = pylab.subplot(len(chans), 1, i + 1, sharex=ax) try: label = names[int(chans[i])] except Exception, err: print 'ERROR: ingnoring exception %s' % (err, ) label = 'channel %s' % chans[i] ax.plot(r['timestamps'] - t_offset, r[chans[i]] * analog_gain, label=label) ax.set_ylabel('V') 
ax.legend() if options.timestamps: ax.xaxis.set_major_formatter(mticker.FuncFormatter(format_date)) else: ax.xaxis.set_major_formatter(mticker.FormatStrFormatter("%s")) ax.yaxis.set_major_formatter(mticker.FormatStrFormatter("%s"))
def doit(fname,options): fname = sys.argv[1] h5 = tables.openFile(fname,mode='r') time_data=h5.root.time_data[:] gain,offset,resids = easy_decode.get_gain_offset_resids( input=time_data['framestamp'], output=time_data['timestamp']) if options.plot_time_data: fig = pylab.figure() ax = fig.add_subplot(111) ax.plot( time_data['framestamp'], time_data['timestamp'], 'bo', label='data' ) ax.set_xlabel('framestamp') ax.set_ylabel('timestamp') minx = np.min(time_data['framestamp']) maxx = np.max(time_data['framestamp']) ax.plot( [minx,maxx], [minx*gain+offset, maxx*gain+offset], 'k-', label='fit') pylab.legend() top = h5.root.time_data.attrs.top wordstream = h5.root.ain_wordstream[:] wordstream = wordstream['word'] # extract into normal numpy array print 'wordstream.shape',wordstream.shape if wordstream.shape == (0,): pylab.show() return pylab.figure() # open a new figure r=easy_decode.easy_decode(wordstream,gain,offset,top) chans = r.dtype.fields.keys() chans.sort() chans.remove('timestamps') names = h5.root.ain_wordstream.attrs.channel_names if hasattr(h5.root.ain_wordstream.attrs,'Vcc'): Vcc = h5.root.ain_wordstream.attrs.Vcc print 'Vcc read from file at',Vcc else: Vcc=3.3 print 'Vcc guessed at',Vcc ADCmax = (2**10)-1 analog_gain = Vcc/ADCmax n_adc_samples = len(r['timestamps']) dt = r['timestamps'][1]-r['timestamps'][0] samps_per_sec = 1.0/dt adc_duration = n_adc_samples*dt print '%d samples at %.1f samples/sec = %.1f seconds'%(n_adc_samples, samps_per_sec, adc_duration) t0 = r['timestamps'][0] total_duration = adc_duration if options.timestamps: t_offset = 0 t_plot_start = t0 else: t_offset = t0 t_plot_start = 0 ax=None for i in range(len(chans)): ax = pylab.subplot(len(chans),1,i+1,sharex=ax) try: label = names[int(chans[i])] except Exception, err: print 'ERROR: ingnoring exception %s'%(err,) label = 'channel %s'%chans[i] ax.plot(r['timestamps']-t_offset,r[chans[i]]*analog_gain, label=label) ax.set_ylabel('V') ax.legend() if options.timestamps: 
ax.xaxis.set_major_formatter( mticker.FuncFormatter(format_date)) else: ax.xaxis.set_major_formatter(mticker.FormatStrFormatter("%s")) ax.yaxis.set_major_formatter(mticker.FormatStrFormatter("%s"))
def main(): h5_file_dir = os.path.join(os.environ['HOME'], 'strokelitude_h5') h5_file_list = os.listdir(h5_file_dir) if not h5_file_list: print '%s directory empty -- nothing to do' % (h5_file_dir, ) sys.exit(0) # Create mat file directory if is doesn't exist mat_file_dir = os.path.join(os.environ['HOME'], 'strokelitude_mat') if not os.path.exists(mat_file_dir): os.mkdir(mat_file_dir) # Loop over input files for fname_h5 in h5_file_list: fname_base, fname_ext = os.path.splitext(fname_h5) if not fname_ext == '.h5': continue fname_mat = '%s.mat' % (fname_base, ) fname_mat_full = os.path.join(mat_file_dir, fname_mat) fname_h5_full = os.path.join(h5_file_dir, fname_h5) if os.path.exists(fname_mat_full): continue print 'converting: %s -- > %s' % (fname_h5, fname_mat) h5 = tables.openFile(fname_h5_full, mode='r') stroke_data = h5.root.stroke_data[:] stroke_times = stroke_data['trigger_timestamp'] time_data = h5.root.time_data[:] gain, offset, resids = easy_decode.get_gain_offset_resids( input=time_data['framestamp'], output=time_data['timestamp']) top = h5.root.time_data.attrs.top wordstream = h5.root.ain_wordstream[:] wordstream = wordstream['word'] # extract into normal numpy array r = easy_decode.easy_decode(wordstream, gain, offset, top) if r is not None: chans = r.dtype.fields.keys() chans.sort() chans.remove('timestamps') if 1: Vcc = h5.root.ain_wordstream.attrs.Vcc channel_names = h5.root.ain_wordstream.attrs.channel_names else: Vcc = 3.3 ADCmax = (2**10) - 1 analog_gain = Vcc / ADCmax else: chans = [] names = h5.root.ain_wordstream.attrs.channel_names savedict = {} if r is not None: t0 = r['timestamps'][0] savedict = {'ADC_timestamp': r['timestamps']} else: t0 = 0 # Write data to a .mat file savedict['data'] = {} savedict['data']['frame'] = stroke_data['frame'] savedict['data']['triggerTimeStamp'] = stroke_times savedict['data']['processingTimeStamp'] = stroke_data[ 'processing_timestamp'] savedict['data']['leftWingAngle'] = stroke_data['left'] 
savedict['data']['rightWingAngle'] = stroke_data['right'] savedict['data']['leftAntennaAngle'] = stroke_data['left_antenna'] savedict['data']['rightAntennaAngle'] = stroke_data['right_antenna'] savedict['data']['headAngle'] = stroke_data['head'] savedict['data']['pulseWidth'] = stroke_data['pulse_width'] savedict['data']['pulseFrame'] = stroke_data['pulse_frame'] if chans != []: analog_key_list = [] for i, name in enumerate(names): ADC_data = r[chans[i]] * analog_gain savedict["ADC" + str(name)] = ADC_data analog_key_list.append("ADC" + str(name)) scipy.io.savemat(fname_mat_full, savedict) h5.close()
def main(): h5_file_dir = os.path.join(os.environ['HOME'],'strokelitude_h5') h5_file_list = os.listdir(h5_file_dir) if not h5_file_list: print '%s directory empty -- nothing to do'%(h5_file_dir,) sys.exit(0) # Create mat file directory if is doesn't exist mat_file_dir = os.path.join(os.environ['HOME'],'strokelitude_mat') if not os.path.exists(mat_file_dir): os.mkdir(mat_file_dir) # Loop over input files for fname_h5 in h5_file_list: fname_base, fname_ext = os.path.splitext(fname_h5) if not fname_ext == '.h5': continue fname_mat = '%s.mat'%(fname_base,) fname_mat_full = os.path.join(mat_file_dir, fname_mat) fname_h5_full = os.path.join(h5_file_dir,fname_h5) if os.path.exists(fname_mat_full): continue print 'converting: %s -- > %s'%(fname_h5,fname_mat) h5 = tables.openFile(fname_h5_full,mode='r') stroke_data=h5.root.stroke_data[:] stroke_times = stroke_data['trigger_timestamp'] time_data=h5.root.time_data[:] gain,offset,resids = easy_decode.get_gain_offset_resids( input=time_data['framestamp'], output=time_data['timestamp']) top = h5.root.time_data.attrs.top wordstream = h5.root.ain_wordstream[:] wordstream = wordstream['word'] # extract into normal numpy array r=easy_decode.easy_decode(wordstream,gain,offset,top) if r is not None: chans = r.dtype.fields.keys() chans.sort() chans.remove('timestamps') if 1: Vcc = h5.root.ain_wordstream.attrs.Vcc channel_names = h5.root.ain_wordstream.attrs.channel_names else: Vcc=3.3 ADCmax = (2**10)-1 analog_gain = Vcc/ADCmax else: chans = [] names = h5.root.ain_wordstream.attrs.channel_names savedict = {} if r is not None: t0 = r['timestamps'][0] savedict = {'ADC_timestamp':r['timestamps']} else: t0 = 0 # Write data to a .mat file savedict['data'] = {} savedict['data']['frame'] = stroke_data['frame'] savedict['data']['triggerTimeStamp'] = stroke_times savedict['data']['processingTimeStamp'] = stroke_data['processing_timestamp'] savedict['data']['leftWingAngle'] = stroke_data['left'] savedict['data']['rightWingAngle'] = 
stroke_data['right'] savedict['data']['leftAntennaAngle'] = stroke_data['left_antenna'] savedict['data']['rightAntennaAngle'] = stroke_data['right_antenna'] savedict['data']['headAngle'] = stroke_data['head'] savedict['data']['pulseWidth'] = stroke_data['pulse_width'] savedict['data']['pulseFrame'] = stroke_data['pulse_frame'] if chans != []: analog_key_list = [] for i, name in enumerate(names): ADC_data = r[chans[i]]*analog_gain savedict["ADC"+str(name)] = ADC_data analog_key_list.append("ADC"+str(name)) scipy.io.savemat(fname_mat_full,savedict) h5.close()
# Fragment: open a strokelitude .h5 file and decode its analog-input stream.
# NOTE(review): `fname` is defined by the surrounding scope -- confirm.
h5 = tables.openFile(fname,mode='r')
stroke_data=h5.root.stroke_data[:]
stroke_times = stroke_data['trigger_timestamp']
print 'repr(stroke_times[0])',repr(stroke_times[0])
time_data=h5.root.time_data[:]
# Linear fit mapping camera framestamps onto host timestamps.
gain,offset,resids = easy_decode.get_gain_offset_resids(
    input=time_data['framestamp'],
    output=time_data['timestamp'])
top = h5.root.time_data.attrs.top
wordstream = h5.root.ain_wordstream[:]
wordstream = wordstream['word'] # extract into normal numpy array
r=easy_decode.easy_decode(wordstream,gain,offset,top)
if r is not None:
    # Sorted channel names, minus the time column.
    chans = r.dtype.fields.keys()
    chans.sort()
    chans.remove('timestamps')
    if 1:  # NOTE(review): else-branch is dead; Vcc is always read from file
        Vcc = h5.root.ain_wordstream.attrs.Vcc
        channel_names = h5.root.ain_wordstream.attrs.channel_names
    else:
        Vcc=3.3
    print 'Vcc',Vcc
    ADCmax = (2**10)-1  # 10-bit ADC full scale
    analog_gain = Vcc/ADCmax  # volts per ADC count
else:
    chans = []
# Fragment: open the .h5 file named by the first positional argument and
# decode its analog-input stream.
# NOTE(review): `args` is defined by the surrounding scope -- confirm.
fname = args[0]
h5 = tables.openFile(fname, mode='r')
stroke_data = h5.root.stroke_data[:]
stroke_times = stroke_data['trigger_timestamp']
print 'repr(stroke_times[0])', repr(stroke_times[0])
time_data = h5.root.time_data[:]
# Linear fit mapping camera framestamps onto host timestamps.
gain, offset, resids = easy_decode.get_gain_offset_resids(
    input=time_data['framestamp'],
    output=time_data['timestamp'])
top = h5.root.time_data.attrs.top
wordstream = h5.root.ain_wordstream[:]
wordstream = wordstream['word']  # extract into normal numpy array
r = easy_decode.easy_decode(wordstream, gain, offset, top)
if r is not None:
    # Sorted channel names, minus the time column.
    chans = r.dtype.fields.keys()
    chans.sort()
    chans.remove('timestamps')
    if 1:  # NOTE(review): else-branch is dead; Vcc is always read from file
        Vcc = h5.root.ain_wordstream.attrs.Vcc
        channel_names = h5.root.ain_wordstream.attrs.channel_names
    else:
        Vcc = 3.3
    print 'Vcc', Vcc
    ADCmax = (2**10) - 1  # 10-bit ADC full scale
    analog_gain = Vcc / ADCmax  # volts per ADC count
else:
    chans = []