def convert_to_df(self, debug=False):
    """Convert every ``.tdms`` file in ``self.path`` into a dataframe of traces.

    Each TDMS file's 'Reference'/'Ramp_Output' channel is used as the shared
    x axis; every channel of every non-'Reference' group becomes one row.

    Parameters
    ----------
    debug : bool
        If True, print the groups and channels found in each file.

    Returns
    -------
    tuple
        (pandas.DataFrame with columns 'data', 'groupName', 'channelName',
        'filename' — 'data' holds a per-trace DataFrame with columns x, y;
        number of rows as int).
    """
    frames = []
    data_files = [x for x in os.listdir(self.path) if x.endswith(".tdms")]
    for filename in data_files:
        tdms_file = TdmsFile(self.path + '/' + filename)
        if debug:
            print("The following Groups and Channels are available:")
            for group in tdms_file.groups():
                print(group)
                for channel in tdms_file.group_channels(group):
                    print(channel)
        # Shared x axis for every channel of this file.
        s1 = pd.Series(tdms_file.object('Reference', 'Ramp_Output').data)
        for group in tdms_file.groups():
            if str(group) == 'Reference':
                continue
            for channel in tdms_file.group_channels(group):
                channelName = TDMSConverter.get_channel_name(self, channel)
                if debug:
                    print(">>>", str(group), '--', channelName)
                s2 = pd.Series(tdms_file.object(str(group), channelName).data)
                df_data = pd.concat([s1, s2], axis=1)
                df_data.columns = ['x', 'y']
                frames.append(pd.DataFrame({
                    "data": [df_data],
                    "groupName": [str(group)],
                    "channelName": [channelName],
                    # BUG FIX: original stored self.path + filename without the
                    # '/' separator used in the TdmsFile() call above.
                    "filename": [self.path + '/' + filename],
                }))
    # Single concat instead of DataFrame.append in a loop: append was removed
    # in pandas 2.0 and copied the whole frame on every iteration.
    df = pd.concat(frames) if frames else pd.DataFrame()
    return df, df.shape[0]
def cut_log_spectra(fileinpaths, times, fileoutpaths_list, **kwargs):
    # Cut each input spectra TDMS file into one output file per (start, end)
    # pair in `times`, using the 'Global'/'Time' channel to find the sample
    # indices of each window. 'Wavelength' is copied whole (it is an axis,
    # not a time series). A window that raises ValueError is deleted again.
    for i, fileinpath in enumerate(fileinpaths):
        fileoutpaths = fileoutpaths_list[i]
        tdmsfile = TF(fileinpath)
        for j, t in enumerate(times):
            fileoutpath = fileoutpaths[j]
            direc = os.path.split(fileoutpath)[0]
            if not os.path.exists(direc):
                os.makedirs(direc)
            root_object = RootObject(properties={})
            try:
                with TdmsWriter(fileoutpath, mode='w') as tdms_writer:
                    # Convert Global/Time to datetime64 and locate the slice
                    # bounds for this cut window.
                    timedata = [
                        dt64(y) for y in tdmsfile.channel_data('Global', 'Time')
                    ]
                    idx1, idx2 = _get_indextime(timedata, t[0], t[1])
                    if idx1 == idx2:
                        # Empty window: write nothing into this file.
                        pass
                    else:
                        for group in tdmsfile.groups():
                            group_object = GroupObject(group, properties={})
                            if group == "Global":
                                for channel in tdmsfile.group_channels(group):
                                    if channel.channel == 'Wavelength':
                                        # Axis channel: keep every sample.
                                        channel_object = ChannelObject(
                                            channel.group, channel.channel,
                                            channel.data)
                                    else:
                                        channel_object = ChannelObject(
                                            channel.group, channel.channel,
                                            channel.data[idx1:idx2])
                                    tdms_writer.write_segment([
                                        root_object, group_object,
                                        channel_object
                                    ])
                            else:
                                # NOTE(review): this slices the CHANNEL LIST,
                                # not each channel's data — confirm that
                                # non-Global groups really hold one channel
                                # per time sample.
                                for channel_object in tdmsfile.group_channels(
                                        group)[idx1:idx2]:
                                    tdms_writer.write_segment([
                                        root_object, group_object,
                                        channel_object
                                    ])
            except ValueError as error:
                print(error)
                print('removing the file at: \n', fileoutpath)
                os.remove(fileoutpath)
def cut_log_spectra(fileinpaths, times, fileoutpaths_list, **kwargs):
    """Cut each spectra TDMS file into one output file per (start, end) pair.

    Non-'Global' groups have their channel list sliced to the cut window;
    in the 'Global' group, 'Wavelength' is copied whole (it is an axis) and
    every other channel is cut with `_cut_channel`. A window that raises
    ValueError has its partial output file removed again.
    """
    for i, fileinpath in enumerate(fileinpaths):
        fileoutpaths = fileoutpaths_list[i]
        tdmsfile = TF(fileinpath)
        for j, t in enumerate(times):
            fileoutpath = fileoutpaths[j]
            direc = os.path.split(fileoutpath)[0]
            if not os.path.exists(direc):
                os.makedirs(direc)
            root_object = RootObject(properties={})
            try:
                with TdmsWriter(fileoutpath, mode='w') as tdms_writer:
                    # BUG FIX: the original iterated over
                    # `tdmsfile.groups().remove('Global')`, which is None
                    # (list.remove mutates in place), and referenced the
                    # undefined names `timedate`, `file`, `time` and the
                    # mismatched writer name `tdmswriter`; it also indexed
                    # the channel list with `[idx1, idx2]` instead of a
                    # slice. Reconstructed to match the working variant.
                    timedata = [
                        dt64(y) for y in tdmsfile.channel_data('Global', 'Time')
                    ]
                    idx1, idx2 = _get_indextime(timedata, t[0], t[1])
                    for group in tdmsfile.groups():
                        if group == 'Global':
                            continue
                        for channel in tdmsfile.group_channels(group)[idx1:idx2]:
                            tdms_writer.write_segment([root_object, channel])
                    for channel in tdmsfile.group_channels('Global'):
                        if channel.channel == 'Wavelength':
                            # Axis channel: keep every sample.
                            channel_object = channel
                        else:
                            channel_object = _cut_channel(channel, t[0], t[1],
                                                          timedata=None)
                        tdms_writer.write_segment(
                            [root_object, channel_object])
            except ValueError as error:
                print(error)
                print('removing the file at: \n', fileoutpath)
                os.remove(fileoutpath)
def f_open_tdms_2(filename):
    """Open a TDMS file and return the data of its first group's first channel.

    Passing the literal string 'Input' pops up a file-picker dialog to choose
    the file instead.
    """
    if filename == 'Input':
        filename = filedialog.askopenfilename()
    tdms_file = TdmsFile(filename)
    first_group = tdms_file.groups()[0]
    first_channel = tdms_file.group_channels(first_group)[0]
    return tdms_file.channel_data(first_group, first_channel.channel)
def import_tdmsfile_to_tempodb(file_path, series_key_base=None):
    # Walk every group/channel of a TDMS file, log its structure via
    # display()/display_properties(), and import each channel into TempoDB.
    # Series keys are "<series_key_base>-<n>" when a base is given, otherwise
    # the channel's TDMS path.
    # NOTE: Python 2 syntax (`print ke`) — do not run under Python 3.
    # Parse the TDMS file and get a handle to the object
    tdmsfile = TdmsFile(file_path)
    # Logging options
    show_properties = True
    show_data = False
    show_time = False
    import_data = True
    count = 0
    level = 0
    root = tdmsfile.object()
    display('/', level)
    if show_properties:
        display_properties(root, level)
    for group in tdmsfile.groups():
        level = 1
        group_obj = tdmsfile.object(group)
        display("%s" % group_obj.path, level)
        if show_properties:
            display_properties(group_obj, level)
        for channel in tdmsfile.group_channels(group):
            level = 2
            display("%s" % channel.path, level)
            if show_properties:
                level = 3
                display("data type: %s" % channel.data_type.name, level)
                display_properties(channel, level)
            if show_data:
                level = 3
                data = channel.data
                display("data: %s" % data, level)
            if show_time:
                level = 3
                time = channel.time_track()
                display("time: %s" % time, level)
            if import_data:
                level = 3
                try:
                    if series_key_base:
                        series_key = "%s-%i" % (series_key_base, count)
                        count += 1
                        # "Paul-Python-TDMS-1"
                    else:
                        # series_key_base = "%s-%s-%s" % os.path.basename(os.path.splitext(file_path))[0], group_obj.
                        series_key = channel.path
                    import_channel_to_tempodb(channel, series_key)
                except KeyError as ke:
                    # time_track() inside the importer needs waveform
                    # metadata; channels without it raise KeyError.
                    display("There is no embedded time data in this channel.", level)
                    print ke
                    print
                    print
def import_tdmsfile_to_tempodb(file_path, series_key_base=None):
    # Walk every group/channel of a TDMS file, log its structure via
    # display()/display_properties(), and import each channel into TempoDB.
    # NOTE: Python 2 syntax (`print ke`) — do not run under Python 3.
    # NOTE(review): this definition is byte-identical to an earlier one in
    # this file — confirm whether one copy can be deleted.
    # Parse the TDMS file and get a handle to the object
    tdmsfile = TdmsFile(file_path)
    # Logging options
    show_properties = True
    show_data = False
    show_time = False
    import_data = True
    count = 0
    level = 0
    root = tdmsfile.object()
    display('/', level)
    if show_properties:
        display_properties(root, level)
    for group in tdmsfile.groups():
        level = 1
        group_obj = tdmsfile.object(group)
        display("%s" % group_obj.path, level)
        if show_properties:
            display_properties(group_obj, level)
        for channel in tdmsfile.group_channels(group):
            level = 2
            display("%s" % channel.path, level)
            if show_properties:
                level = 3
                display("data type: %s" % channel.data_type.name, level)
                display_properties(channel, level)
            if show_data:
                level = 3
                data = channel.data
                display("data: %s" % data, level)
            if show_time:
                level = 3
                time = channel.time_track()
                display("time: %s" % time, level)
            if import_data:
                level = 3
                try:
                    if series_key_base:
                        series_key = "%s-%i" % (series_key_base, count)
                        count += 1
                        # "Paul-Python-TDMS-1"
                    else:
                        # series_key_base = "%s-%s-%s" % os.path.basename(os.path.splitext(file_path))[0], group_obj.
                        series_key = channel.path
                    import_channel_to_tempodb(channel, series_key)
                except KeyError as ke:
                    display("There is no embedded time data in this channel.", level)
                    print ke
                    print
                    print
def read_tdms(fn):
    """Read a TDMS recording into a dict of group -> list of channel arrays.

    The sampling interval `dt` comes from a "time" group when present,
    otherwise from the file's 'Sampling Rate' / 'Sampling Rate(AI)'
    properties (1e3/sr), falling back to 1.0 (or 1/25 for sr <= 0).
    Adds 'dt', 'yunits' and 'holding' (the 'Meta' channels, or '') keys.
    """
    tdms_file = TdmsFile(fn)
    try:
        times = np.array(
            [[channel.data for channel in tdms_file.group_channels(group)
              if channel.data is not None]
             for group in tdms_file.groups()
             if group.lower() == "time"][0][0])
        dt = np.mean(np.diff(times))
    except IndexError:
        # No "time" group found: derive dt from the file properties.
        # Flattened from the original's doubly-nested not-in/else ladder.
        props = tdms_file.object().properties
        if "Sampling Rate" in props:
            sr = float(props['Sampling Rate'])
            dt = 1e3 / sr if sr > 0 else 1.0 / 25.0
        elif "Sampling Rate(AI)" in props:
            sr = float(props['Sampling Rate(AI)'])
            dt = 1e3 / sr if sr > 0 else 1.0 / 25.0
        else:
            dt = 1.0
    yunits = tdms_file.object().properties['Units']
    try:
        meta = tdms_file.group_channels('Meta')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; a missing 'Meta' group yields ''.
        meta = ''
    recording = {group: [
        channel.data for channel in tdms_file.group_channels(group)
        if channel.data is not None] for group in tdms_file.groups()}
    recording["dt"] = dt
    recording["yunits"] = yunits
    recording["holding"] = meta
    return recording
def plot(self):
    """Plot the first seven channels of t1.tdms onto canvas1..canvas7."""
    tdms_file = TdmsFile("t1.tdms")
    data_array = []
    for grp in tdms_file.groups():
        for ch in reversed(tdms_file.group_channels(grp)):
            # The channel repr contains the group and channel names between
            # single quotes; split on ' to recover them for object().
            temp = str(ch).split('\'')
            temp_obj = tdms_file.object(temp[1], temp[3])
            data_array.append(temp_obj.data)
    data_array = np.asarray(data_array)
    # One axis per canvas; replaces seven copy-pasted ax1..ax7 blocks with
    # identical structure (add_subplot, clear, plot, draw).
    for idx in range(7):
        ax = self.figure.add_subplot()
        ax.clear()
        ax.plot(data_array[idx])
        getattr(self, 'canvas%d' % (idx + 1)).draw()
def getData(filePath):
    """Return a list holding the data array of every channel in the file."""
    print("Start getData")
    tdms_file = TdmsFile(filePath)
    traces = []
    for group in tdms_file.groups():
        traces.extend(channel.data
                      for channel in tdms_file.group_channels(group))
    return traces
def tdms_info(tdmsName):
    """Return (all group names, per-group channel-object lists) for a file."""
    tdms_file = TdmsFile(tdmsName)
    groupName = tdms_file.groups()
    channelName = [tdms_file.group_channels(g) for g in groupName]
    return groupName, channelName
def read(self):
    # Read the <fname>.tdms trap recording: print file properties, capture
    # channel metadata, scale raw traces by the calibration constants, and
    # (re)create a results directory named after the file.
    # NOTE(review): relies on module-level fname, QPD_nm2V, PZT_nm2V,
    # MTA_nm2V, stiffness_pN2nm and assumes the 'Trap' group has >= 8
    # channels ordered QPDx, QPDy, QPDsum, PZTx, PZTy, PZTz, MTAx, MTAy —
    # confirm against the acquisition setup.
    # File information below
    tdms_file = TdmsFile(fname + '.tdms')  # Reads a tdms file.
    root_object = tdms_file.object()  # tdms file information
    for name, value in root_object.properties.items():
        print("{0}: {1}".format(name, value))
    group_name = "Trap"  # Get the group name
    channels = tdms_file.group_channels(
        group_name)  # Get the channel object
    self.channel_num = len(channels)  # Channel number
    self.channel_name = [
        str(channels[i].channel) for i in range(len(channels))
    ]  # Channel name
    self.dt = channels[0].properties[u'wf_increment']  # Sampling time
    self.fs = int(1.0 / self.dt)  # Sampling frequency
    self.N = len(channels[0].time_track())
    print("Channel number: %d" % self.channel_num)
    print("Channel name: %s" % self.channel_name)
    print("Sampling rate: %d Hz" % self.fs)
    print("Data size: %d sec \n" % int(self.N * self.dt))
    # Read data
    print("Reading raw data ... \n")
    self.t = channels[0].time_track()
    # QPD/PZT traces are mean-centred, then scaled by their calibration
    # factors.
    self.QPDx = (channels[0].data - np.mean(channels[0].data)) * QPD_nm2V[0]
    self.QPDy = (channels[1].data - np.mean(channels[1].data)) * QPD_nm2V[1]
    self.QPDs = channels[2].data
    self.PZTx = (channels[3].data - np.mean(channels[3].data)) * PZT_nm2V[0]
    self.PZTy = (channels[4].data - np.mean(channels[4].data)) * PZT_nm2V[1]
    self.PZTz = (channels[5].data - np.mean(channels[5].data)) * PZT_nm2V[2]
    # MTA traces are referenced to their first sample instead of the mean.
    self.MTAx = (channels[6].data - channels[6].data[0]) * MTA_nm2V[0]
    self.MTAy = (channels[7].data - channels[7].data[0]) * MTA_nm2V[1]
    # Force from displacement via trap stiffness.
    self.Fx = self.QPDx * stiffness_pN2nm[0]
    self.Fy = self.QPDy * stiffness_pN2nm[1]
    # Make a directory to save the results (wiping any previous results).
    self.path_data = os.getcwd()
    self.path_save = os.path.join(self.path_data, fname)
    if os.path.exists(self.path_save):
        shutil.rmtree(self.path_save)
        os.makedirs(self.path_save)
    else:
        os.makedirs(self.path_save)
def _parseFile(self):
    # Parse either a TDMS file or a Mach-1 <INFO>/<DATA> text export into
    # self.time / self.data / self.channels, all keyed by group.
    # NOTE: uses itertools.izip — Python 2 only.
    if self.filename.lower().endswith('.tdms'):
        tdms = TdmsFile(self.filename)
        self.time = {}
        self.data = {}
        self.groups = tdms.groups()
        self.channels = {}
        for g in self.groups:
            self.time[g] = {}
            self.data[g] = {}
            self.channels[g] = tdms.group_channels(g)
            for c in self.channels[g]:
                if c.has_data:
                    props = c.properties
                    self.time[g][props["NI_ChannelName"]] = c.time_track()
                    self.data[g][props["NI_ChannelName"]] = c.data
    elif self.filename.lower().endswith('.txt'):
        fid = open(self.filename, "r")
        # Only proceed if the first line identifies a Mach-1 export.
        if "<Mach-1 File>" in fid.readline():
            contents = fid.readlines()
            fid.close()
            self.time = OrderedDict()
            self.data = OrderedDict()
            self.channels = OrderedDict()
            # Pair up the <INFO>/<END INFO> and <DATA>/<END DATA> markers
            # into (start, end) line-index tuples.
            info_blocks = [
                i for i, j in izip(count(), contents)
                if "<INFO>" in j or "<END INFO>" in j
            ]
            info_blocks = izip(islice(info_blocks, 0, None, 2),
                               islice(info_blocks, 1, None, 2))
            data_blocks = [
                i for i, j in izip(count(), contents)
                if "<DATA>" in j or "<END DATA>" in j
            ]
            data_blocks = izip(islice(data_blocks, 0, None, 2),
                               islice(data_blocks, 1, None, 2))
            # Groups are numbered 1..N; list(info_blocks) consumes the
            # iterator, which is fine since it is not used again.
            self.groups = range(1, len(list(info_blocks)) + 1)
            for i, ind in enumerate(data_blocks):
                g = self.groups[i]
                # Line after <DATA> is the tab-separated channel header.
                header = contents[ind[0] + 1].rstrip("\r\n").split("\t")
                self.channels[g] = header
                data = contents[ind[0] + 2:ind[1]]
                for j, d in enumerate(data):
                    data[j] = d.rstrip("\r\n").split("\t")
                data = np.array(data, float)
                self.time[g] = OrderedDict()
                self.data[g] = OrderedDict()
                for j, c in enumerate(self.channels[g][1:]):
                    # Column 0 is the shared time base for every channel.
                    self.time[g][c] = data[:, 0]
                    self.data[g][c] = data[:, j + 1]
def read_tdms(filename, A_scan_num, B_scan_num):
    """Read PS-OCT data from a TDMS file and reshape it into a C-scan matrix.

    Parameters: filename is the TDMS file location, A_scan_num the number of
    A-scans per B-scan, B_scan_num the number of B-scans per C-scan.
    Returns a (B_scan_num, A_scan_num, A_scan_length) numpy array.
    """
    tdms_file = TdmsFile(filename)
    # The file has a single 'Untitled' group; read its first channel.
    first_name = tdms_file.group_channels('Untitled')[0].channel
    data = tdms_file.object('Untitled', first_name).data
    # Derive the A-scan length from the flat buffer size.
    A_scan_length = int(len(data) / B_scan_num / A_scan_num)
    data.resize((B_scan_num, A_scan_num, A_scan_length))
    C_scan = np.array(data)
    del data
    return C_scan
def tdms_to_nparr(tdms_name, time_req=False):
    """Return the first channel of a TDMS file as a float32 ndarray.

    Parameters
    ----------
    tdms_name : str
        Path of the TDMS file.
    time_req : bool
        When truthy, also return the channel's waveform time track.

    Returns
    -------
    numpy.ndarray or (numpy.ndarray, numpy.ndarray)
        The channel data, plus the time track when requested.
    """
    tdms_file = TdmsFile(tdms_name)
    group_name = tdms_file.groups()
    channel_name = tdms_file.group_channels(group_name[0])
    channel = tdms_file.object(group_name[0], channel_name[0].channel)
    data = channel.data.astype(np.float32)
    if time_req:  # idiom fix: was `if time_req == True`
        return data, channel.time_track()
    return data
def read_tdms(fn):
    """Read a two-channel (current, voltage) TDMS recording.

    Returns (data, samples-per-waveform) where data stacks the time track,
    the current trace scaled by 500, and the voltage trace.
    """
    tdms_file = TdmsFile(fn)
    groups = tdms_file.groups()
    names = tdms_file.group_channels(groups[0])
    current_channel = tdms_file.object(groups[0], names[0].channel)
    voltage_channel = tdms_file.object(groups[0], names[1].channel)
    sam = current_channel.property('wf_samples')
    data = np.array((current_channel.time_track(),
                     current_channel.data * 500,
                     voltage_channel.data))
    return data, sam
def read(self):
    """Load the 'Trap' group into self.ch and mean-centre the active axis.

    Fills self.ch with shape (n_channels, N_total) and sets self.x to
    channel 0 ('X' axis) or channel 1 (otherwise), minus its mean.
    """
    # File information below
    tdms_file = TdmsFile(self.fname + '.tdms')  # Reads a tdms file.
    channels = tdms_file.group_channels("Trap")  # Channel objects of 'Trap'
    # 2D array (channel, timetrace) truncated to the first N_total samples.
    self.ch = np.zeros((len(channels), N_total))
    for i, channel in enumerate(channels):
        self.ch[i,] = channel.data[range(N_total)]
    axis_index = 0 if self.axis == 'X' else 1
    self.x = self.ch[axis_index]
    self.x = self.x - np.mean(self.x)
def tdms_to_json(directory, target_directory, name):
    """Parse <directory><name>.tdms and dump each channel to JSON.

    Every channel is written to <target_directory><channel>_<name>.json as
    {channel_name: data_list}; channel names are sanitised for file names.
    """
    print("Parsing: " + directory + name + ".tdms")
    # load in data, take the TDMS data type as example
    tdms_file = TdmsFile(directory + name + ".tdms")
    groups = tdms_file.groups()
    df_list = []
    df_list.append(pd.DataFrame())
    for group in groups:
        print(group)
        for channel in tdms_file.group_channels(group):
            try:
                # Channel repr holds the name between "'/'" and "'>".
                chan_name = str(channel).split("'/'")[1].replace("'>", "")
                data = tdms_file.channel_data(group, chan_name)
                # Sanitise the name for use in file names / JSON keys.
                chan_name = chan_name.replace("(", " ").replace(
                    ")", " ").replace(" ", " ").replace(" ", "_")
                df_test = pd.DataFrame()
                df_test[chan_name] = chan_name  # data
                df_list[-1][chan_name] = chan_name  # data
                with open(
                        target_directory + chan_name + '_' + name + ".json",
                        'w') as fp:
                    json.dump({chan_name: data.tolist()}, fp)
            except Exception as err:
                # FIX: narrowed from a bare `except:` (which also swallowed
                # KeyboardInterrupt) and surface the actual error instead of
                # only a fixed placeholder message.
                print("An Error Occured at: X !", err)
                continue
def convert_tdms(fileName, tempo, env):
    # Convert <env.path>/<fileName>.tdms to <envG.H5path>/<fileName>.h5,
    # copying per-group Nchannel/Tstart/sampling attributes and gzip-ing
    # each channel as a dataset. Returns 1 on completion.
    # NOTE: Python 2 syntax (`print group.path`) — do not run under Python 3.
    if tempo:
        # Optionally wait for the acquisition system to finish writing.
        time.sleep(20)
    path = env.path
    tdms_file = TdmsFile(os.path.join(path, fileName + '.tdms'))
    # tdms_file=TdmsFile(r'D:\DATA\00838_Data.tdms')
    hdf5 = h5py.File(envG.H5path + fileName + '.h5', 'w')
    #channel=tdms_file.object('PXI M6251','Lang_U')
    #group=tdms_file.object('PXI M6251')
    grouplist = tdms_file.groups()
    #print grouplist
    for i in grouplist:
        group = tdms_file.object(i)
        grouph = hdf5.create_group(i)
        print group.path
        # Per-instrument metadata: only these three group paths are known.
        # NOTE(review): an unknown group would reuse the previous loop
        # iteration's nbchannels/tstart/sampling (or NameError on the first
        # iteration) — confirm every file only contains these groups.
        if group.path == '/\'PXI M6251\'':
            nbchannels = group.properties['Nchannel']
            tstart = group.properties['Tstart']
            sampling = group.properties['SampleTime']
        if group.path == '/\'Tektronix\'':
            tstart = group.properties['Tstart']
            #sampling=group.properties['SampleTime']
            sampling = 1 / 1.25e9
            nbchannels = group.properties['Nchannel']
        if group.path == '/\'S7\'':
            nbchannels = group.properties['Nchannel']
            tstart = 0.
            sampling = 1.
        #print nbchannels,tstart,sampling
        grouph.attrs['Nchannel'] = nbchannels
        grouph.attrs['Tstart'] = tstart
        grouph.attrs['sampling'] = 1 / float(sampling)
        liste = tdms_file.group_channels(i)
        for j in liste:
            # Dataset name is the channel path with quotes stripped.
            grouph.create_dataset(re.sub('[\']', '', j.path),
                                  data=j.data,
                                  compression="gzip")
    # conn=sqlite3.connect('ishtar')
    # curs=conn.cursor()
    # curs.execute('insert into shots values(?,?,?,?,?)',(int(fileName[0:-5]),fileName,0.,0.,0.))
    # conn.commit()
    hdf5.create_group('Process')
    hdf5.close()
    return 1
def read_cal_TDMS(tdms_filename, porttype='2-port'):
    """Read 2-port calibration parameters from a TDMS file.

    Channels are named "<term>_X" (real part) or "<term>_Y" (imaginary
    part); returns a dict mapping each term to its complex values.
    Raises ValueError for a channel name that fits neither suffix.
    """
    from nptdms import TdmsFile
    tdms_file = TdmsFile(tdms_filename)
    ## 2-port data
    twoport = tdms_file.group_channels('2-port')
    cal_paramsR = {}
    cal_paramsI = {}
    for idx in range(0, len(twoport)):
        tmpvar = twoport[idx]
        # Split the channel path and strip quotes; last element is the name.
        path = [
            item.replace('\'', '') for item in tmpvar.path.split('/')
            if item != ''
        ]
        name = path[len(path) - 1].split('_')
        term = name[0]
        realORimag = name[1]
        values = tmpvar.data
        bins = tmpvar.time_track()
        if realORimag == 'X':
            cal_paramsR[term] = values
        elif realORimag == 'Y':
            cal_paramsI[term] = values
        else:
            # BUG FIX: `raise ('ParseNameError')` raised a plain string,
            # which is a TypeError under Python 3.
            raise ValueError('cannot parse channel name: %s' % tmpvar.path)
    # Combine the real and imaginary halves.
    cal_params = {}
    for each in cal_paramsR.keys():
        cal_params[each] = cal_paramsR[each] + 1j * cal_paramsI[each]
    return cal_params
def read(self):
    """Load the 'Trap' traces and derive sum-normalised QPD / scaled PZT.

    Fills self.ch (n_channels x self.N_total) and sets QPDx/QPDy (mean-
    centred and divided by the QPD sum), QPDs, and PZTx/PZTy (mean-centred
    and scaled by PZT_nm2V).
    """
    # File information below
    tdms_file = TdmsFile(self.fname + '.tdms')  # Reads a tdms file.
    channels = tdms_file.group_channels("Trap")  # Channel objects of 'Trap'
    # 2D array (channel, timetrace) truncated to the first N_total samples.
    self.ch = np.zeros((len(channels), self.N_total))
    for i, channel in enumerate(channels):
        self.ch[i,] = channel.data[range(self.N_total)]
    qpd_x, qpd_y, qpd_sum, pzt_x, pzt_y = self.ch[:5]
    self.QPDs = qpd_sum
    self.PZTx = (pzt_x - np.mean(pzt_x)) * PZT_nm2V[0]
    self.PZTy = (pzt_y - np.mean(pzt_y)) * PZT_nm2V[1]
    # Mean-centre, then normalise by the sum signal (same net result as the
    # original two-step assignment).
    self.QPDx = (qpd_x - np.mean(qpd_x)) / qpd_sum
    self.QPDy = (qpd_y - np.mean(qpd_y)) / qpd_sum
def convert_tdms(fileName, tempo, env):
    # Convert <env.path>/<fileName>.tdms to <env.path>/<fileName>.h5,
    # copying per-group Nchannel/Tstart/sampling attributes and gzip-ing
    # each channel as a dataset, then registering the file with
    # env.process.addFile.
    # NOTE: Python 2 syntax (`print group.path`) — do not run under Python 3.
    # NOTE(review): near-duplicate of another convert_tdms in this file that
    # writes to envG.H5path and returns 1 — confirm which one is current.
    if tempo:
        # Optionally wait for the acquisition system to finish writing.
        time.sleep(20)
    path = env.path
    tdms_file = TdmsFile(os.path.join(path, fileName + '.tdms'))
    # tdms_file=TdmsFile(r'D:\DATA\00838_Data.tdms')
    hdf5 = h5py.File(path + os.sep + fileName + '.h5', 'w')
    #channel=tdms_file.object('PXI M6251','Lang_U')
    #group=tdms_file.object('PXI M6251')
    grouplist = tdms_file.groups()
    #print grouplist
    for i in grouplist:
        group = tdms_file.object(i)
        grouph = hdf5.create_group(i)
        print group.path
        # Per-instrument metadata for the three known group paths.
        if group.path == '/\'PXI M6251\'':
            nbchannels = group.properties['Nchannel']
            tstart = group.properties['Tstart']
            sampling = group.properties['SampleTime']
        if group.path == '/\'Tektronix\'':
            tstart = group.properties['Tstart']
            #sampling=group.properties['SampleTime']
            sampling = 1 / 1.25e9
            nbchannels = group.properties['Nchannel']
        if group.path == '/\'S7\'':
            nbchannels = group.properties['Nchannel']
            tstart = 0.
            sampling = 1.
        #print nbchannels,tstart,sampling
        grouph.attrs['Nchannel'] = nbchannels
        grouph.attrs['Tstart'] = tstart
        grouph.attrs['sampling'] = 1 / float(sampling)
        liste = tdms_file.group_channels(i)
        for j in liste:
            # Dataset name is the channel path with quotes stripped.
            grouph.create_dataset(re.sub('[\']', '', j.path),
                                  data=j.data,
                                  compression="gzip")
    # conn=sqlite3.connect('ishtar')
    # curs=conn.cursor()
    # curs.execute('insert into shots values(?,?,?,?,?)',(int(fileName[0:-5]),fileName,0.,0.,0.))
    # conn.commit()
    hdf5.create_group('Process')
    hdf5.close()
    env.process.addFile(fileName)
def readTDMS(path, Source=None):
    """Read every channel of a TDMS file into a list of Signal objects.

    The measurement start time is parsed from the file name (characters
    [-20:-5], format %Y%m%d_%H%M%S) and localised to UTC. Zero-length
    signals are dropped. Returns [] when the file cannot be opened.
    """
    from nptdms import TdmsFile
    from datetime import datetime
    from os import sep
    import pytz
    try:
        tdms_file = TdmsFile(path)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt still works.
        logger.warning("Failed to open {}".format(path))
        return []
    if Source is None:  # idiom fix: was `Source == None`
        Source = getSource(tdms_file, path)
    Signals = []
    for group in tdms_file.groups():
        for channel in tdms_file.group_channels(group):
            # Channel name sits between the second '/' and the trailing "'>".
            Name = str(channel).split("/")[2][1:-2]
            if "unit_string" in channel.properties:
                Unit = channel.properties["unit_string"]
            else:
                Unit = ""
            Data = tdms_file.object(group, Name)
            Fs = 1 / channel.properties["wf_increment"]
            timestampstr = path[-20:-5]
            # timestamp is start of measurement
            timestamp = datetime.strptime(timestampstr, "%Y%m%d_%H%M%S")
            timestamp = pytz.utc.localize(timestamp)
            Signals.append(
                Signal(Source, group, Fs, Data.data, Name, Unit, timestamp)
            )
    # %% Quality check of Signals
    # BUG FIX: the original called Signals.remove() while iterating Signals,
    # which skips the element following each removal.
    Signals = [signal for signal in Signals if signal.data.shape[0] != 0]
    return Signals
def cut_powermeter(fileinpaths, times, fileoutpaths_list, **kwargs):
    """Cut up a power meter tdms file based on input times."""
    localtz = tzlocal.get_localzone()  # NOTE(review): unused below
    for i in range(len(fileinpaths)):
        fileinpath = fileinpaths[i]
        fileoutpaths = fileoutpaths_list[i]
        tdmsfile = TF(fileinpath)
        for j in range(len(times)):
            # times[j] is a (start, end) pair of numpy datetime64 values;
            # .astype('O') converts to datetime, then they are marked UTC.
            time1 = times[j][0].astype('O')
            time1 = time1.replace(tzinfo=pytz.utc)  #convert to datetime
            time2 = times[j][1].astype('O')
            time2 = time2.replace(tzinfo=pytz.utc)
            fileoutpath = fileoutpaths[j]
            direc = os.path.split(fileoutpath)[0]
            if not os.path.exists(direc):
                os.makedirs(direc)
            root_object = RootObject(properties={
                #TODO root properties
            })
            try:
                with TdmsWriter(fileoutpath, mode='w') as tdms_writer:
                    for group in tdmsfile.groups():
                        # Each group carries its own 'Time_LV' time base.
                        timedata = tdmsfile.channel_data(group, 'Time_LV')
                        for channel in tdmsfile.group_channels(group):
                            # NOTE(review): `break` abandons the REST of the
                            # group's channels at the first non-numeric one —
                            # confirm `continue` wasn't intended.
                            if type(channel.data_type.size) == type(None):
                                break  #skips over non numeric channels
                            channel_object = _cut_channel(channel,
                                                          time1,
                                                          time2,
                                                          timedata=timedata)
                            tdms_writer.write_segment(
                                [root_object, channel_object])
                        # The datetime channel is cut and written separately.
                        timechannel = tdmsfile.object(group, 'Time_LV')
                        timechannel_cut = _cut_datetime_channel(
                            timechannel, time1, time2)
                        tdms_writer.write_segment(
                            [root_object, timechannel_cut])
            except ValueError as error:
                print(error)
                print('removing the file at: \n', fileoutpath)
                os.remove(fileoutpath)
def read_cal_TDMS(tdms_filename, porttype='2-port'):
    """Read 2-port calibration parameters from a TDMS file.

    Channels are named "<term>_X" (real part) or "<term>_Y" (imaginary
    part); returns a dict mapping each term to its complex values.
    Raises ValueError for a channel name that fits neither suffix.
    """
    from nptdms import TdmsFile
    tdms_file = TdmsFile(tdms_filename)
    ## 2-port data
    twoport = tdms_file.group_channels('2-port')
    cal_paramsR = {}
    cal_paramsI = {}
    for idx in range(0, len(twoport)):
        tmpvar = twoport[idx]
        # Split the channel path and strip quotes; last element is the name.
        path = [item.replace('\'', '') for item in tmpvar.path.split('/')
                if item != '']
        name = path[len(path) - 1].split('_')
        term = name[0]
        realORimag = name[1]
        values = tmpvar.data
        bins = tmpvar.time_track()
        if realORimag == 'X':
            cal_paramsR[term] = values
        elif realORimag == 'Y':
            cal_paramsI[term] = values
        else:
            # BUG FIX: `raise ('ParseNameError')` raised a plain string,
            # which is a TypeError under Python 3.
            raise ValueError('cannot parse channel name: %s' % tmpvar.path)
    # Combine the real and imaginary halves.
    cal_params = {}
    for each in cal_paramsR.keys():
        cal_params[each] = cal_paramsR[each] + 1j * cal_paramsI[each]
    return cal_params
def _parseFile(self):
    # Parse either a TDMS file or a Mach-1 <INFO>/<DATA> text export into
    # self.time / self.data / self.channels, all keyed by group.
    # NOTE: uses itertools.izip — Python 2 only.
    # NOTE(review): duplicate of an identically-named method earlier in this
    # file — confirm whether one copy can be deleted.
    if self.filename.lower().endswith('.tdms'):
        tdms = TdmsFile(self.filename)
        self.time = {}
        self.data = {}
        self.groups = tdms.groups()
        self.channels = {}
        for g in self.groups:
            self.time[g] = {}
            self.data[g] = {}
            self.channels[g] = tdms.group_channels(g)
            for c in self.channels[g]:
                if c.has_data:
                    props = c.properties
                    self.time[g][props["NI_ChannelName"]] = c.time_track()
                    self.data[g][props["NI_ChannelName"]] = c.data
    elif self.filename.lower().endswith('.txt'):
        fid = open(self.filename, "r")
        # Only proceed if the first line identifies a Mach-1 export.
        if "<Mach-1 File>" in fid.readline():
            contents = fid.readlines()
            fid.close()
            self.time = OrderedDict()
            self.data = OrderedDict()
            self.channels = OrderedDict()
            # Pair up the <INFO>/<END INFO> and <DATA>/<END DATA> markers
            # into (start, end) line-index tuples.
            info_blocks = [i for i, j in izip(count(), contents)
                           if "<INFO>" in j or "<END INFO>" in j]
            info_blocks = izip(islice(info_blocks, 0, None, 2),
                               islice(info_blocks, 1, None, 2))
            data_blocks = [i for i, j in izip(count(), contents)
                           if "<DATA>" in j or "<END DATA>" in j]
            data_blocks = izip(islice(data_blocks, 0, None, 2),
                               islice(data_blocks, 1, None, 2))
            # Groups are numbered 1..N; list(info_blocks) consumes the
            # iterator, which is fine since it is not used again.
            self.groups = range(1, len(list(info_blocks)) + 1)
            for i, ind in enumerate(data_blocks):
                g = self.groups[i]
                # Line after <DATA> is the tab-separated channel header.
                header = contents[ind[0] + 1].rstrip("\r\n").split("\t")
                self.channels[g] = header
                data = contents[ind[0] + 2:ind[1]]
                for j, d in enumerate(data):
                    data[j] = d.rstrip("\r\n").split("\t")
                data = np.array(data, float)
                self.time[g] = OrderedDict()
                self.data[g] = OrderedDict()
                for j, c in enumerate(self.channels[g][1:]):
                    # Column 0 is the shared time base for every channel.
                    self.time[g][c] = data[:, 0]
                    self.data[g][c] = data[:, j + 1]
def testTdmsFile(): tdms = TdmsFile( "/Volumes/RAID-0/LockheedMartin/TDMS_200120_12-40_2020-01-20 ATRQ Build 2/Slice00122.tdms" ) # tdms.as_hdf('/tmp/Slice00122.h5') properties = tdms.properties for property in properties: print(f'PROPERTY: {property} = {properties[property]}') objects = tdms.objects for obj in objects: print(f'OBJECT: {obj}') groups = tdms.groups() for part in groups: print(f'GROUP: {part}') # get the data from each group's channel and make a CSV channels = tdms.group_channels(part) # make a 2D array, and populate it with the arrays in this loop. groupCSV = [] areaCol = [] xCol = [] yCol = [] paramCol = [] intensityCol = [] laserCol = [] csvCount = 0 # copy each channel's data to its respective frame for channel in channels: print(f' CHANNEL: {channel}') names = [] for i in channels: wordList = str(i).split("/") name = wordList[-1] name = name.strip(">") name = name.strip("'") names.append(name) colNames = names
def load_diode_data(diode_data_file, apply_lowpass): """ Loads in diode data from a tdms file given the location Parameters ---------- diode_data_file : str Location of diode data file apply_lowpass : bool Determines whether or not a lowpass filter is applied Returns ------- pandas.core.frame.DataFrame Dataframe of diode data, with time stamps removed if they are present """ # import data tf = TdmsFile(diode_data_file) diode_channels = tf.group_channels("diodes") if len(diode_channels) == 0 or \ len(diode_channels[0].data) > 0: # diodes.tdms has data data = TdmsFile(diode_data_file).as_dataframe() for key in data.keys(): # remove time column if "diode" not in key.replace("diodes", "").lower(): data = data.drop(key, axis=1) if apply_lowpass: # noinspection PyTypeChecker data = data.apply(_diode_filter) else: # empty tdms file data = pd.DataFrame(columns=[c.path for c in diode_channels], data=np.array([[np.NaN] * 50 for _ in diode_channels]).T) return data
def cut_log_file(fileinpaths, times, fileoutpaths_list, **kwargs):
    """
    Cuts up a log file based on the supplied times.

    This function assumes that the channels are waveforms.
    """
    for in_path, out_paths in zip(fileinpaths, fileoutpaths_list):
        tdmsfile = TF(in_path)
        for (time1, time2), out_path in zip(times, out_paths):
            target_dir = os.path.split(out_path)[0]
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            root_object = RootObject(properties={
                #TODO root properties
            })
            try:
                # Write every channel of every group, cut to [time1, time2].
                with TdmsWriter(out_path, mode='w') as tdms_writer:
                    for group in tdmsfile.groups():
                        for channel in tdmsfile.group_channels(group):
                            cut = _cut_channel(channel, time1, time2,
                                               timedata=None)
                            tdms_writer.write_segment([root_object, cut])
            except ValueError as error:
                # A window outside the data range: drop the partial file.
                print(error)
                print('removing the file at: \n', out_path)
                os.remove(out_path)
def _getChannelInfo(self) -> [[object]]:
    # Get channel information from the tdms file at self._path: one
    # [name, type number, unit, unit description] entry per channel.
    # Returns [[]] when the file does not exist.
    #
    # FIX: load units.json via a context manager so the handle is closed
    # even if json.load raises (the original left it open in that case).
    with open("units.json", 'r') as f:
        units = json.load(f)
    fileInfo = QFileInfo(self._path)
    if not fileInfo.exists():
        return [[]]
    tdms = TdmsFile(self._path)
    group = tdms.groups()[0]  #There is only one data group in TdmsFile
    chnObjs = tdms.group_channels(group)
    chnInfos = []
    for chnObj in chnObjs:
        properties = chnObj.properties
        chnName = chnObj.channel
        # Missing unit metadata maps to "unknown" / type 0.
        chnUnit = properties.get('NI_UnitDescription', "unknown")
        try:
            chnType = units[chnUnit]["number"]
            chnUnitDesc = units[chnUnit]["unit_desc"]
        except KeyError:
            chnType = 0
            chnUnitDesc = "unknown"
        chnInfos.append([chnName, chnType, chnUnit, chnUnitDesc])
    return chnInfos
def get_stimuli(tdms_path, save_path, fps, sampling_rate, duration):
    """Detect stimulus onsets in the photodiode channel of a TDMS file.

    Onsets are rising edges (diff > .5); edges closer together than
    duration * sampling_rate samples are merged into one stimulus. A
    diagnostic figure is saved to save_path and shown. Returns the list of
    stimulus onset sample indices. (`fps` is currently unused.)
    """
    #Load TDMS file
    tdms_file = TdmsFile(tdms_path)
    #Plot save and show Photodiode channel
    photodiode_raw = tdms_file.group_channels('Photodiode')[0].data
    onsets = np.where(np.diff(photodiode_raw) > .5)[0]
    signal_duration = (duration * sampling_rate)
    stimuli = [onsets[0]]
    for i in onsets:
        # Keep only edges separated by more than one stimulus duration.
        if i - stimuli[-1] > signal_duration:
            stimuli.append(i)
    print(stimuli)
    f, ax = plt.subplots()
    ax.plot(photodiode_raw, color="r", lw=1)
    signal = np.zeros(len(photodiode_raw))
    for i in stimuli:
        signal[i] = 1
    ax.plot(signal)
    # BUG FIX: save BEFORE show — plt.show() blocks and releases the active
    # figure, so the original wrote a blank image.
    plt.savefig(save_path)
    plt.show()
    return stimuli
class TDMS_dj(object):
    """Convenience wrapper around a TdmsFile: group/channel listing, data
    access, and HDF5 conversion."""

    def __init__(self, tdms_path):
        self.tdms_path = tdms_path
        self.file_name = path_leaf(self.tdms_path)
        self.file_dir = path_dir(self.tdms_path)
        self.tdms_file = TdmsFile(tdms_path)

    def get_groups(self):
        """Cache and return the list of group names."""
        self.group_lst = self.tdms_file.groups()
        return self.group_lst

    def get_channels(self, grp_ind):
        """Return cleaned channel names for one group.

        Names are parsed out of each channel object's repr: the text after
        "'/'" up to either a tab or the closing "'>".
        """
        self.chans_lst = self.tdms_file.group_channels(grp_ind)
        self.chans_lstout = []
        for raw in self.chans_lst:
            text = str(raw)
            text = text[text.find("'/'") + 3:]
            end = text.find("\t")
            if end < 0:
                end = text.find("'>")
            self.chans_lstout.append(text[:end])
        return self.chans_lstout

    def get_all_grps_chnls(self):
        """Map every group name to its cleaned channel-name list."""
        self.grps = self.get_groups()
        self.grps_chans_dict = {
            grp: self.get_channels(grp) for grp in self.grps
        }
        return self.grps_chans_dict

    def get_data(self, group, channel):
        """Return the raw data array of one group/channel."""
        return self.tdms_file.object(group, channel).data

    def conv_to_hdf(self):
        """Convert the whole TDMS file to an HDF5 file next to it."""
        self.hdf_path = path_join_hdf(self.file_dir, self.file_name)
        self.tdms_file.as_hdf(self.hdf_path, mode='w', group='/')
    return (newx,newy)

# ---- top-level script: inspect a TDMS file given on the command line ----
# NOTE: the `return` above is the tail of a function that begins before this
# chunk; Python 2 syntax (`print filename`) — do not run under Python 3.
# reading TDMS file
filename = sys.argv[1]
datafile = TdmsFile(filename)  # TdmsFile is a function from the library/class npTDMS??
# get the group names
print filename
list_of_groups = datafile.groups()  # groups is a function from npTDMS, what returns the names of the groups
# print list_of_groups[0] it's only possible to print element 0, so the list has only one element
number_of_groups = len(list_of_groups)
# print number_of_groups this gives a "1", so there is only one group
for groupname in list_of_groups:
    print groupname  # the groupname is "data"; it means to print every element of a list
# groupname keeps the last loop value ("data") after the loop.
list_of_channels = datafile.group_channels(groupname)  # group channels is a function from npTDMS, what returns a list of channel objects
for channel in list_of_channels:
    print channel
# extracting first waveform
# getting voltages
bin_res_x = []
bin_res_y = []
#for group in ("Cube X1",):
for group in ("Cube X1", "Cube X2", "Cube Y1", "Cube Y2", "Cube Z1", "Cube Z2"):
    #for group in ("Cube X2", "Cube Y1", "Cube Y2", "Cube Z1", "Cube Z2"):
    print "Group: " + group
    cubeX1 = datafile.object('data', group)  # we have one group and a list of channels in that group
    # getting time increment and then creating time array
    print cubeX1.properties
    dt = cubeX1.property('wf_increment')  # extract the time information of the properties of the group "cubeX1"
def cut_log_file(fileinpaths, times, fileoutpaths_list, **kwargs):
    """
    Cuts up a log file based on the supplied times.

    This function assumes that the channels are waveforms.

    Parameters:
        fileinpaths: list of input TDMS file paths.
        times: list of (start, end) pairs; one output file is written
            per pair, per input file.
        fileoutpaths_list: list (parallel to fileinpaths) of lists of
            output paths (parallel to times).
        kwargs: optional 'TimeChannelName' (and 'TimeGroupName') naming
            an explicit time channel; without it every channel is
            treated as a waveform and sliced via its own time track.

    Side effects: creates output directories as needed; on ValueError
    the partially written output file is deleted.
    """
    for i, fileinpath in enumerate(fileinpaths):
        fileoutpaths = fileoutpaths_list[i]
        tdmsfile = TF(fileinpath)
        for j in range(len(times)):
            time1 = times[j][0]
            time2 = times[j][1]
            fileoutpath = fileoutpaths[j]
            direc = os.path.split(fileoutpath)[0]
            if not os.path.exists(direc):
                os.makedirs(direc)
            root_object = RootObject(properties={
                #TODO root properties
            })
            # Tracks whether the shared time channel was already written
            # to the current output file.
            timegroupwritten = False
            try:
                with TdmsWriter(fileoutpath, mode='w') as tdms_writer:
                    for group in tdmsfile.groups():
                        if 'TimeChannelName' in kwargs:
                            if 'TimeGroupName' in kwargs:
                                timegroup = kwargs['TimeGroupName']
                            else:
                                # Per-group time channel: reset the flag so
                                # each group gets its own cut time channel.
                                # NOTE(review): placement of this reset is
                                # my reading of the original layout — confirm.
                                timegroup = group
                                timegroupwritten = False
                            timechannel = tdmsfile.object(
                                timegroup, kwargs['TimeChannelName'])
                            timedata = timechannel.data
                            timedata = np.array(
                                list(map(lambda x: np.datetime64(x), timedata)))
                            # microsecond resolution for index search below
                            timedata = timedata.astype('M8[us]')
                            if timegroupwritten == False:
                                timechannel_cut = _cut_datetime_channel(
                                    timechannel, time1, time2)
                                tdms_writer.write_segment(
                                    [root_object, timechannel_cut])
                                timegroupwritten = True
                            # explicit time channel supplied -> channels are
                            # not self-timed waveforms
                            waveform = False
                        else:
                            waveform = True
                        for channel in tdmsfile.group_channels(group):
                            # if type(channel.data_type.size) == type(None): break #skips over non numeric channels
                            if channel.data_type == nptdms.types.DoubleFloat:
                                if waveform:
                                    # derive timestamps from the waveform's
                                    # own start/increment properties
                                    timedata = channel.time_track(
                                        absolute_time=True)
                                idx1, idx2 = _get_indextime(
                                    timedata, time1, time2)
                                channel_object = _cut_channel(
                                    channel, idx1, idx2, waveform)
                                tdms_writer.write_segment(
                                    [root_object, channel_object])
            except ValueError as error:
                # e.g. requested time range not found; drop the partial file
                print(error)
                print('removing the file at: \n', fileoutpath)
                os.remove(fileoutpath)
from nptdms import TdmsFile
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
import os
from scipy.signal import butter, lfilter, freqz, resample, wiener, gaussian
from scipy.ndimage import filters
from collections import namedtuple

# Script: load photodiode / camera-sync channels from a spider-behaviour
# recording and provide Butterworth low-pass filtering helpers.
#tdms_path = r'C:\Users\maksymilianm\Dropbox (UCL - SWC)\Project_spiders\Raw_data\def_behav_probe\30_10_19_sp10_LDR.tdms'
tdms_path = r'C:\Users\maksymilianm\Dropbox (UCL - SWC)\Project_spiders\Analysis\confined_shade_arena_escape\17_02_20_sp2\spider_camera_ldr(0).tdms'

tdms_file = TdmsFile(tdms_path)
# First (only) channel of each group; legacy nptdms group_channels API.
photodiode_raw = tdms_file.group_channels('Photodiode')[0].data
spider_camera_input = tdms_file.group_channels('spider_camera_input')[0].data


def butter_lowpass(cutoff, fs, order=5):
    """Design a low-pass Butterworth filter.

    cutoff: cutoff frequency in Hz; fs: sampling rate in Hz.
    Returns the (b, a) transfer-function coefficients.
    """
    nyq = 0.5 * fs
    # butter() expects the cutoff normalised to the Nyquist frequency
    normal_cutoff = cutoff / nyq
    b, a = butter(order, normal_cutoff, btype='low', analog=False)
    return b, a


def butter_lowpass_filter(data, cutoff, fs, order=5):
    """Apply a causal (one-pass) low-pass Butterworth filter to data."""
    b, a = butter_lowpass(cutoff, fs, order=order)
    y = lfilter(b, a, data)
    return y


def remove_noise(data):
    # NOTE(review): body continues past this chunk — left untouched.
def MergeSavePlotTDMS( mypath, VMAG = {'delta':0.1, 'low':0.1}, NCY = {'pre':3, 'post': 5}, BASE = {'Vln':277.128} ):
    """
    Merges TDMS files, crops time to island formation to cessasion, saves in excel

    Input: Directory with the test result files, e.g.: "aLabView\\20150306"
    MergeTDMS maintains several Excel files in the target directory:
    MergeSummary.xlsx
    SignalsInfo.xlsx
    CroppedData.xlsx

    NOTE(review): Python 2 code. Relies on names not imported here
    (sqrt, zeros, ones, mod, pi, where, NaN, subplots_adjust) —
    presumably star-imported at file level; confirm before running.
    NOTE(review): mutable dict default arguments are shared across
    calls — harmless here only because they are never mutated.
    """
    # import pdb # debugger
    import datetime
    import pandas as pd # multidimensional data analysis
    # import xlsxwriter
    # import numpy as np
    # Matplotlib ===
    # import matplotlib
    # matplotlib.use('Agg')
    import matplotlib.pyplot as plt
    # from matplotlib.backends.backend_pdf import PdfPages
    # Wei's advice ===
    import matplotlib.backends.backend_pdf as dpdf
    from os import listdir
    from os.path import isfile, join
    from nptdms import TdmsFile
    # from numpy import array
    from numpy import cos, sin, arctan2
    from pandas import concat, ExcelWriter, rolling_mean

    CONFIG = {'WriteAllDataToExcel':False, # Will take forever ~5min for 27GB
              'WriteLimitedDataToExcel':False, # Only data from island creation to cessation
              'WriteSummaryToExcel':True, # Summary from TDMS files
              'ValidateCTs':False, # Plot pages that validate CT scaling and orientation
              'PlotFullRange': False} # Add a page with full time range of data

    # BASE = {'Vln':480/sqrt(3)} # Voltage base
    # B2 LC1 CT group was reversed during calibration
    B2LC1SIGN = -1.0
    # LC1 B1 CT was reversed on 20150311, restored, then reversed again during PG&E CT calibration
    B1LC1SIGN = -1.0 # reversed CT. Use +1 for correct polarity
    # Limiting plot range of acquired signals
    # Islanding detection works by comparing: 'Island Contactor status' > icsLvl
    # abs(uVmag-iVmag)>delta
    # Collapse detection works by comparing: iVmag<low
    # NOTE(review): this reassignment shadows the VMAG parameter —
    # the caller-supplied value is ignored.
    VMAG = {'icsLvl': 3, 'delta':0.1, 'low':0.1} # island contactor status Level, Signal magnitudes in p.u. to limit plot range
    # NOTE(review): likewise shadows the NCY parameter.
    NCY = {'pre':3, 'post': 5} # Number of cycles to show pre-islanding and post-collapse
    # mypath = 'aLabView\\20150311' # Now a function parameter
    myfiles = [f for f in listdir(mypath) if isfile(join(mypath,f)) ]
    # filtering for .tdms extension
    tdmsfiles_list = [f for f in myfiles if f.endswith(".tdms")]
    # empty dictionaries
    ch_data = {} # interim results
    sig_info = {} # fname -> DataFrame
    sig_data = {} # fname -> DataFrame time,signal,...,signal
    file_info = {} # fname -> DataFrame Comment
    fiComment = [] # list to collect all file comments
    fiProgRev = [] # ditto but program version
    # Cycling through files to figure out how many are concatenated
    for fname in tdmsfiles_list:
        tdms_file = TdmsFile(mypath + "\\" + fname)
        # fetching comments
        tdms_root = tdms_file.object()
        fiComment = fiComment + [tdms_root.property(u'Comment')]
        fiProgRev = fiProgRev + [tdms_root.property(u'Program Revision')]
        # groups_list = tdms_file.groups() # There is only one group 'Analog'
        channels_list = tdms_file.group_channels(u'Analog') # u for unicode.
        # pdb.set_trace() # Debugger stop if desired
        # ch_names = [ch.property(u'NI_ChannelName') for ch in channels_list]
        # Channel metadata pulled from TDMS waveform properties:
        ch_names = [ch.path.split("/")[2].strip("' ") for ch in channels_list]
        ch_slope = [ch.property(u'NI_Scale[0]_Linear_Slope') for ch in channels_list]
        ch_icept = [ch.property(u'NI_Scale[0]_Linear_Y_Intercept') for ch in channels_list]
        ch_tstrt = [ch.property(u'wf_start_time') for ch in channels_list]
        ch_tincr = [ch.property(u'wf_increment') for ch in channels_list]
        ch_tsamp = [ch.property(u'wf_samples') for ch in channels_list]
        ch_toffs = [ch.property(u'wf_start_offset') for ch in channels_list]
        ch_tend = [ch.property(u'wf_start_time') + datetime.timedelta( seconds=ch.property(u'wf_increment')* ch.property(u'wf_samples')) for ch in channels_list]
        ch_scld = [ch.property(u'NI_Scaling_Status')!=u'unscaled' for ch in channels_list]
        # pack all this into a dataframe
        sig_info[fname] = pd.DataFrame({ 'chName': ch_names,
            'chScaled': ch_scld,
            'chScale': ch_slope,
            'chIcept': ch_icept,
            'chTstart': ch_tstrt,
            'chTend': ch_tend,
            'chTincr': ch_tincr},
            columns=['chName', 'chScaled', 'chScale', 'chIcept', 'chTstart', 'chTend', 'chTincr'])
        # NOTE(review): `ch` here is the Python-2 leak of the last list-
        # comprehension variable — time track taken from the last channel.
        ch_data['Time'] = ch.time_track()
        for ch in channels_list:
            # ch_data[ch.property(u'NI_ChannelName')] = ch.data*ch.property(u'NI_Scale[0]_Linear_Slope')
            ch_data[ch.path.split("/")[2].strip("' ")] = ch.data*ch.property(u'NI_Scale[0]_Linear_Slope')
        sig_data[fname] = pd.DataFrame(ch_data,columns=['Time']+ch_names)
    file_info = pd.DataFrame({ 'fiComment': fiComment, 'fiProgRev': fiProgRev}, columns=['fiComment', 'fiProgRev'], index=tdmsfiles_list)
    # Concatenating files that have a matching chTstart chTend
    keys = sorted(sig_info.keys())
    flast = keys[0]
    df1 = sig_info[flast]
    tStartLast = df1[df1.chName == u'Utility Bus V A'].chTstart.values[0]
    tEndLast = df1[df1.chName == u'Utility Bus V A'].chTend.values[0]
    for fname in keys[1:]:
        df1 = sig_info[fname]
        tStart = df1[df1.chName == u'Utility Bus V A'].chTstart.values[0]
        tEnd = df1[df1.chName == u'Utility Bus V A'].chTend.values[0]
        if(tEndLast == tStart): # merge files
            print tEndLast
            print tStart
            print fname + " continues " + flast
            # shift the follower's time axis so it appends seamlessly
            sig_data[fname].Time += datetime.timedelta.total_seconds(tEndLast-tStartLast)
            sig_data[flast] = concat([sig_data[flast],sig_data[fname]],ignore_index=True)
            del sig_data[fname] # removes object from dictionary
            sig_info[flast].chTend = sig_info[fname].chTend
            del sig_info[fname]
            file_info = file_info.drop(fname)
            tEndLast = tEnd
        else:
            tStartLast = tStart
            tEndLast = tEnd
            flast = fname
    if CONFIG['WriteSummaryToExcel']:
        writer = ExcelWriter(mypath + '\\MergeSummary.xlsx')
        file_info.to_excel(writer,'file_info')
        if False: # error due to time zone awareness in LabView time stamps
            for fname in file_info.index.values.tolist():
                sig_info[fname].to_excel(writer,fname)
        writer.save()
    if CONFIG['WriteAllDataToExcel']: # This takes forever -- 5min for ~27GB
        writer = ExcelWriter(mypath + '\\AllData.xlsx')
        for fname in file_info.index.values.tolist():
            sig_data[fname].to_excel(writer,fname)
        writer.save()
    # Only the data from island formation to cessation
    if CONFIG['WriteLimitedDataToExcel']:
        # file is open here, but written from within the plot loop
        print "Opening: LimitedData.xlsx"
        writer = ExcelWriter(mypath + '\\LimitedData.xlsx')
    # Plotting results
    # Open pdf file
    print "Opening: Results.pdf"
    pltPdf = dpdf.PdfPages(mypath + '\\Results.pdf')
    # prepare a list of files to plot
    file_list = file_info.index.values.tolist();
    for fname in file_list:
    # for fname in [file_list[0]]:
        print "Processing: " + fname
        # Utility voltage magnitude: alpha beta -> mag
        uVa=sig_data[fname][u'Utility Bus V A'].values
        uVb=sig_data[fname][u'Utility Bus V B'].values
        uVc=sig_data[fname][u'Utility Bus V C'].values
        uVal = uVa - 0.5 * (uVb + uVc)
        uVbe = sqrt(3.)/2. * (uVb - uVc)
        uVmag = 2./3.*sqrt(uVal*uVal+uVbe*uVbe)
        sig_data[fname][u'Utility Vmag'] = pd.Series(uVmag,index=sig_data[fname].index)
        # Island voltage magnitude: alpha beta -> mag
        iVa=sig_data[fname][u'Island Bus V A'].values
        iVb=sig_data[fname][u'Island Bus V B'].values
        iVc=sig_data[fname][u'Island Bus V C'].values
        iVal = iVa - 0.5 * (iVb + iVc)
        iVbe = sqrt(3.)/2. * (iVb - iVc)
        iVmag = 2./3.*sqrt(iVal*iVal+iVbe*iVbe)
        sig_data[fname][u'Island Val'] = pd.Series(iVal,index=sig_data[fname].index)
        sig_data[fname][u'Island Vbe'] = pd.Series(iVbe,index=sig_data[fname].index)
        sig_data[fname][u'Island Vmag'] = pd.Series(iVmag,index=sig_data[fname].index)
        # Island voltage frequency calculations using PLL. Must execute in a for loop, can't vectorize
        L_VlnIn = 480*sqrt(2.)/sqrt(3.)
        Pll_BW = 4.0*377
        GmPllWn = .725*Pll_BW
        GmPllPrpGn = Pll_BW/L_VlnIn
        GmPllIntGn = GmPllWn*GmPllWn/L_VlnIn
        GmPllWInt = 377.
        GmPllWIntMx = 2.5*GmPllWInt
        GmPllWIntMn = -0.5*GmPllWInt
        GmPllW = 377.
        L_DelTm1 = sig_info[fname].chTincr.values[0] # Taking the first channel's time increment
        GmAngElecFbk = -arctan2(iVbe[0],iVal[0])
        iVx = zeros(iVa.shape) # setting output arrays to zero
        iVy = zeros(iVa.shape)
        iWpll = ones(iVa.shape)*377.0
        for i in range(0,iVa.shape[0]):
            # calculate angle
            GmPllDelAng = L_DelTm1*GmPllW;
            GmAngElecFbk = mod(GmAngElecFbk + GmPllDelAng, 2*pi)
            # Calculate voltage transform
            iVx[i] = iVal[i]*cos(GmAngElecFbk) + iVbe[i]*sin(GmAngElecFbk)
            iVy[i] = -iVal[i]*sin(GmAngElecFbk) + iVbe[i]*cos(GmAngElecFbk)
            # calculate voltage error
            GmPllVyErr = -iVy[i]
            # Calculate integral term, clamp
            GmPllWInt = GmPllWInt + GmPllIntGn*L_DelTm1*GmPllVyErr
            if (GmPllWInt > GmPllWIntMx): GmPllWInt = GmPllWIntMx
            if (GmPllWInt < GmPllWIntMn): GmPllWInt = GmPllWIntMn
            # Calculate PLL frequency, clamp
            GmPllW = GmPllWInt + GmPllVyErr*GmPllPrpGn;
            if (GmPllW > GmPllWIntMx): GmPllW = GmPllWIntMx
            if (GmPllW < GmPllWIntMn): GmPllW = GmPllWIntMn
            iWpll[i] = GmPllWInt
        sig_data[fname][u'Island Vx'] = pd.Series(iVx, index=sig_data[fname].index)
        sig_data[fname][u'Island Vy'] = pd.Series(iVy, index=sig_data[fname].index)
        sig_data[fname][u'Island Wpll'] = pd.Series(iWpll,index=sig_data[fname].index)
        # Island voltage rms values using rolling_mean of squared signals
        iVa2 = iVa*iVa
        iVb2 = iVb*iVb
        iVc2 = iVc*iVc
        sig_data[fname][u'Island Va^2'] = pd.Series(iVa2,index=sig_data[fname].index)
        sig_data[fname][u'Island Vb^2'] = pd.Series(iVb2,index=sig_data[fname].index)
        sig_data[fname][u'Island Vc^2'] = pd.Series(iVc2,index=sig_data[fname].index)
        tinc = sig_info[fname]['chTincr'][sig_info[fname]['chName']==u'Island Bus V A'].values[0]
        # window = one 60 Hz cycle worth of samples
        Varms = sqrt(rolling_mean(sig_data[fname][u'Island Va^2'],1./60./tinc).values)
        Vbrms = sqrt(rolling_mean(sig_data[fname][u'Island Vb^2'],1./60./tinc).values)
        Vcrms = sqrt(rolling_mean(sig_data[fname][u'Island Vc^2'],1./60./tinc).values)
        sig_data[fname][u'Island Varms'] = pd.Series(Varms,index=sig_data[fname].index)
        sig_data[fname][u'Island Vbrms'] = pd.Series(Vbrms,index=sig_data[fname].index)
        sig_data[fname][u'Island Vcrms'] = pd.Series(Vcrms,index=sig_data[fname].index)
        # Island voltage sequence components based on rms values
        Vposx = Varms - Vbrms*cos(pi/3)*cos(2*pi/3) + Vbrms*sin(pi/3)*sin(2*pi/3) - Vcrms*cos(pi/3)*cos(4*pi/3) - Vcrms*sin(pi/3)*sin(4*pi/3)
        Vposy = - Vbrms*cos(pi/3)*sin(2*pi/3) - Vbrms*sin(pi/3)*cos(2*pi/3) - Vcrms*cos(pi/3)*sin(4*pi/3) + Vcrms*sin(pi/3)*cos(4*pi/3)
        Vpos = sqrt(Vposx*Vposx+Vposy*Vposy)/3
        Vnegx = Varms - Vbrms*cos(pi/3)*cos(4*pi/3) + Vbrms*sin(pi/3)*sin(4*pi/3) - Vcrms*cos(pi/3)*cos(2*pi/3) - Vcrms*sin(pi/3)*sin(2*pi/3)
        Vnegy = - Vbrms*cos(pi/3)*sin(4*pi/3) - Vbrms*sin(pi/3)*cos(4*pi/3) - Vcrms*cos(pi/3)*sin(2*pi/3) + Vcrms*sin(pi/3)*cos(2*pi/3)
        Vneg = sqrt(Vnegx*Vnegx+Vnegy*Vnegy)/3
        Vzerx = Varms - Vbrms*cos(pi/3) - Vcrms*cos(pi/3)
        Vzery = - Vbrms*sin(pi/3) + Vcrms*sin(pi/3)
        Vzer = sqrt(Vzerx*Vzerx+Vzery*Vzery)/3
        sig_data[fname][u'Island Vpos'] = pd.Series(Vpos,index=sig_data[fname].index)
        sig_data[fname][u'Island Vneg'] = pd.Series(Vneg,index=sig_data[fname].index)
        sig_data[fname][u'Island Vzer'] = pd.Series(Vzer,index=sig_data[fname].index)
        # Utility currents
        uIa=sig_data[fname][u'Utility I A'].values
        uIb=sig_data[fname][u'Utility I B'].values
        uIc=sig_data[fname][u'Utility I C'].values
        uIal = uIa - 0.5 * (uIb + uIc)
        uIbe = sqrt(3.)/2. * (uIb - uIc)
        sig_data[fname][u'uIal'] = pd.Series(uIal,index=sig_data[fname].index)
        sig_data[fname][u'uIbe'] = pd.Series(uIbe,index=sig_data[fname].index)
        # Utility Power calcuations kW
        uP = (uVa*uIa+uVb*uIb+uVc*uIc)/1000
        uQ = ((uVb-uVc)*uIa+(uVa-uVb)*uIc+(uVc-uVa)*uIb)/sqrt(3)/1000
        sig_data[fname][u'P Utility'] = pd.Series(uP,index=sig_data[fname].index)
        sig_data[fname][u'Q Utility'] = pd.Series(uQ,index=sig_data[fname].index)
        # RLC currents
        rIa=sig_data[fname][u'RLC Passive Load I A'].values
        rIb=sig_data[fname][u'RLC Passive Load I B'].values
        rIc=sig_data[fname][u'RLC Passive Load I C'].values
        # RLC power calcuations
        rP = (iVa*rIa+iVb*rIb+iVc*rIc)/1000
        rQ = ((iVb-iVc)*rIa+(iVa-iVb)*rIc+(iVc-iVa)*rIb)/sqrt(3)/1000
        sig_data[fname][u'P RLC'] = pd.Series(rP,index=sig_data[fname].index)
        sig_data[fname][u'Q RLC'] = pd.Series(rQ,index=sig_data[fname].index)
        # Amplifier currents
        ampIa=sig_data[fname][u'GE Load I A'].values
        ampIb=sig_data[fname][u'GE Load I B'].values
        ampIc=sig_data[fname][u'GE Load I C'].values
        # Amplifier power calculations
        ampP = (iVa*ampIa+iVb*ampIb+iVc*ampIc)/1000
        ampQ = ((iVb-iVc)*ampIa+(iVa-iVb)*ampIc+(iVc-iVa)*ampIb)/sqrt(3)/1000
        sig_data[fname][u'P AMP'] = pd.Series(ampP,index=sig_data[fname].index)
        sig_data[fname][u'Q AMP'] = pd.Series(ampQ,index=sig_data[fname].index)
        # B2 currents
        b2Ia=B2LC1SIGN*sig_data[fname][u'B2 LC1 I A'].values
        b2Ib=B2LC1SIGN*sig_data[fname][u'B2 LC1 I B'].values
        b2Ic=B2LC1SIGN*sig_data[fname][u'B2 LC1 I C'].values
        # B2 Power calculations
        b2P = (iVa*b2Ia+iVb*b2Ib+iVc*b2Ic)/1000
        b2Q = ((iVb-iVc)*b2Ia+(iVa-iVb)*b2Ic+(iVc-iVa)*b2Ib)/sqrt(3)/1000
        sig_data[fname][u'P B2'] = pd.Series(b2P,index=sig_data[fname].index)
        sig_data[fname][u'Q B2'] = pd.Series(b2Q,index=sig_data[fname].index)
        # B1 currents
        b1LC1=B1LC1SIGN*sig_data[fname][u'B1 LC1 I'].values
        b1LC2=sig_data[fname][u'B1 LC2 I'].values
        b1LC3=sig_data[fname][u'B1 LC3 I'].values
        # line currents reconstructed from the three LC legs
        b1Ia = b1LC1 - b1LC2
        b1Ib = b1LC3 - b1LC1
        b1Ic = b1LC2 - b1LC3
        sig_data[fname][u'b1Ia'] = pd.Series(b1Ia,index=sig_data[fname].index)
        sig_data[fname][u'b1Ib'] = pd.Series(b1Ib,index=sig_data[fname].index)
        sig_data[fname][u'b1Ic'] = pd.Series(b1Ic,index=sig_data[fname].index)
        # B1 Power calculations
        b1P = (iVa*b1Ia+iVb*b1Ib+iVc*b1Ic)/1000
        b1Q = ((iVb-iVc)*b1Ia+(iVa-iVb)*b1Ic+(iVc-iVa)*b1Ib)/sqrt(3)/1000
        sig_data[fname][u'P B1'] = pd.Series(b1P,index=sig_data[fname].index)
        sig_data[fname][u'Q B1'] = pd.Series(b1Q,index=sig_data[fname].index)
        # Total PV calculations (banks 1 and 2)
        pvIa = b1Ia + b2Ia
        pvIb = b1Ib + b2Ib
        pvIc = b1Ic + b2Ic
        sig_data[fname][u'pvIa'] = pd.Series(pvIa,index=sig_data[fname].index)
        sig_data[fname][u'pvIb'] = pd.Series(pvIb,index=sig_data[fname].index)
        sig_data[fname][u'pvIc'] = pd.Series(pvIc,index=sig_data[fname].index)
        pvIal = pvIa - 0.5 * (pvIb + pvIc)
        pvIbe = sqrt(3.)/2. * (pvIb - pvIc)
        sig_data[fname][u'pvIal'] = pd.Series(pvIal,index=sig_data[fname].index)
        sig_data[fname][u'pvIbe'] = pd.Series(pvIbe,index=sig_data[fname].index)
        # Penetration calculations
        # penB1 = where(iVmag/sqrt(2)/BASE['Vln'] > VMAG['low'],b1P/rP,NaN)
        # penB2 = where(iVmag/sqrt(2)/BASE['Vln'] > VMAG['low'],b2P/rP,NaN)
        # penPV = where(iVmag/sqrt(2)/BASE['Vln'] > VMAG['low'],(b1P+b2P)/rP,NaN)
        penB1 = where(sig_data[fname][u'Island Contactor status'] > VMAG['icsLvl'],b1P/(rP+ampP),NaN)
        penB2 = where(sig_data[fname][u'Island Contactor status'] > VMAG['icsLvl'],b2P/(rP+ampP),NaN)
        penPV = where(sig_data[fname][u'Island Contactor status'] > VMAG['icsLvl'],(b1P+b2P)/(rP+ampP),NaN)
        sig_data[fname][u'B1 pen'] = pd.Series(penB1,index=sig_data[fname].index)
        sig_data[fname][u'B2 pen'] = pd.Series(penB2,index=sig_data[fname].index)
        sig_data[fname][u'B1+B2 pen'] = pd.Series(penPV,index=sig_data[fname].index)
        # Selecting a region of interest: island creation to cessation
        df1 = sig_data[fname]
        # ix1 = df1[abs(df1[u'Utility Vmag']-df1[u'Island Vmag'])/sqrt(2)/BASE['Vln'] > VMAG['delta']].index.values[0]
        if df1[abs(df1[u'Island Contactor status']) < VMAG['icsLvl']].empty:
            ix1 = df1.index.values[-1]/2
        else:
            ix1 = df1[abs(df1[u'Island Contactor status']) < VMAG['icsLvl']].index.values[0]
        if df1[abs(df1[u'Island Vmag'])/sqrt(2)/BASE['Vln'] < VMAG['low']].empty:
            ix2 = df1.index.values[-1]/2
        else:
            ix2 = df1[abs(df1[u'Island Vmag'])/sqrt(2)/BASE['Vln'] < VMAG['low']].index.values[0]
        tinc = sig_info[fname]['chTincr'][sig_info[fname]['chName']==u'Utility Bus V A'].values[0]
        # widen the window by NCY cycles on each side
        left = int(NCY['pre']*1./60./tinc)
        right = int(NCY['post']*1./60./tinc)
        ix1 = max([ix1-left,0])
        ix2 = min([ix2+right,df1.index.values[-1]])
        df2 = df1[(df1.index > ix1) & (df1.index < ix2)]
        if CONFIG['WriteLimitedDataToExcel']:
            # Only the data from island formation to cessation
            df2.to_excel(writer,fname) # data is written here
        if True: # Place to try new things
            # Fig1: Utility voltage
            fig, (ax0, ax1) = plt.subplots(nrows=2, ncols=1, figsize=(8.5,11))
            fig.suptitle(fname) # This titles the figure
            # File info output to page top
            label= file_info[file_info.index==fname][['fiComment']].values[0][0]
            ax0.annotate(label, xy=(0.5/8.5, 10.5/11), # (0.5,-0.25)inch from top left corner
                xycoords='figure fraction',
                horizontalalignment='left', verticalalignment='top',
                fontsize=10)
            subplots_adjust(top=9./11.)
            # Alpha/Beta plots
            ax0.set_title('Island Voltage Al/Be')
            ax0.plot(df2['Island Val']/1.5/sqrt(2)/BASE['Vln'], df2['Island Vbe']/1.5/sqrt(2)/BASE['Vln'])
            ax0.set_xlim([-1.5,1.5])
            ax0.set_ylim([-1.2,1.2])
            ax0.grid(True, which='both')
            ax0.set_aspect('equal')
            ax1.set_title('Currents Al/Be')
            ax1.plot(df2['pvIal']/1.5, df2['pvIbe']/1.5)
            # ax1.set_ylim([-1.2,1.2])
            ax1.grid(True, which='both')
            ax1.set_aspect('equal')
            # ax1.set_title('Island Voltage Al/Be')
            # ax1.plot(df2['Time'], df2['Island Val']/1.5/sqrt(2)/BASE['Vln'])
            # ax1.plot(df2['Time'], df2['Island Vbe']/1.5/sqrt(2)/BASE['Vln'])
            # ax1.set_ylim([-1.2,1.2])
            # ax1.grid(True, which='both')
            pltPdf.savefig() # saves fig to pdf
            plt.close() # Closes fig to clean up memory
        if False: # Adding a chart with PLL variables
            # Fig1a:
            fig, (ax0,ax1,ax2,ax3) = plt.subplots(nrows=4, ncols=1, figsize=(8.5,11), sharex=True)
            fig.suptitle(fname) # This titles the figure
            ax0.set_title('Utility Bus Vabc')
            ax0.plot(df2['Time'], df2[u'Utility Bus V A'])
            ax0.plot(df2['Time'], df2[u'Utility Bus V B'])
            ax0.plot(df2['Time'], df2[u'Utility Bus V C'])
            ax0.set_ylim([-500,500])
            ax0.grid(True, which='both')
            ax1.set_title('Island Bus Vabc')
            ax1.plot(df2['Time'], df2[u'Island Bus V A'])
            ax1.plot(df2['Time'], df2[u'Island Bus V B'])
            ax1.plot(df2['Time'], df2[u'Island Bus V C'])
            ax1.plot(df2['Time'], df2[u'Island Vmag'])
            # ax1.set_ylim([-500,500])
            ax1.grid(True, which='both')
            ax2.set_title('Island Bus Frequency')
            ax2.plot(df2['Time'], df2[u'Island Wpll']/(2*pi))
            # ax2.set_ylim([-100,100])
            ax2.grid(True, which='both')
            ax3.set_title('Island Bus Vx, Vy')
            ax3.plot(df2['Time'], df2[u'Island Vx'])
            ax3.plot(df2['Time'], df2[u'Island Vy'])
            # ax3.set_ylim([-100,100])
            ax3.grid(True, which='both')
            pltPdf.savefig() # Saves fig to pdf
            plt.close() # Closes fig to clean up memory
        if CONFIG['PlotFullRange']: # Plots a page with entire length of captured signals
            # Fig2:
            fig, (ax0,ax1,ax2,ax3,ax4) = plt.subplots(nrows=5, ncols=1, figsize=(8.5,11), sharex=True)
            # plt.title(fname) # this has no effect
            # ax0.set_title('Utility Bus Vabc')
            # ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility Bus V A'])
            # ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility Bus V B'])
            # ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility Bus V C'])
            # ax0.set_ylim([-500,500])
            # ax0.grid(True, which='both')
            ax0.set_title('Island Bus Vabc')
            ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Island Bus V A'])
            ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Island Bus V B'])
            ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Island Bus V C'])
            ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Island Vmag'])
            ax0.set_ylim([-500,500])
            ax0.grid(True, which='both')
            ax1.set_title('Island Bus Frequency')
            ax1.plot(df2['Time'], df2[u'Island Wpll']/(2*pi))
            ax1.set_ylim([-120,120])
            ax1.grid(True, which='both')
            ax2.set_title('RLC Load Current Iabc')
            ax2.plot(sig_data[fname]['Time'], sig_data[fname][u'RLC Passive Load I A'])
            ax2.plot(sig_data[fname]['Time'], sig_data[fname][u'RLC Passive Load I B'])
            ax2.plot(sig_data[fname]['Time'], sig_data[fname][u'RLC Passive Load I C'])
            ax2.set_ylim([-100,100])
            ax2.grid(True, which='both')
            ax3.set_title('B1+B2 Iabc')
            ax3.plot(sig_data[fname]['Time'], sig_data[fname][u'pvIa'])
            ax3.plot(sig_data[fname]['Time'], sig_data[fname][u'pvIb'])
            # NOTE(review): pvIb is plotted twice here — pvIc is likely
            # intended on the next line.
            ax3.plot(sig_data[fname]['Time'], sig_data[fname][u'pvIb'])
            ax3.set_ylim([-100,100])
            ax3.grid(True, which='both')
            ax4.set_title('Utility Iabc')
            ax4.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility I A'])
            ax4.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility I B'])
            ax4.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility I C'])
            ax4.set_ylim([-100,100])
            ax4.grid(True, which='both')
            pltPdf.savefig() # Saves fig to pdf
            plt.close() # Closes fig to clean up memory
        if CONFIG['ValidateCTs']: # Plots a page to validate CT reads and orientation
            # FigX:
            fig, (ax0,ax1,ax2,ax3,ax4,ax5) = plt.subplots(nrows=6, ncols=1, figsize=(8.5,11), sharex=True)
            fig.suptitle(fname) # This titles the figure
            ax0.set_title('Phase A CTs: rlc_Ia = u_Ia + pv_Ia')
            ax0.plot(df2['Time'], df2[u'RLC Passive Load I A'])
            ax0.plot(df2['Time'], df2[u'Utility I A']+df2[u'pvIa'])
            # ax0.set_ylim([-50,50])
            ax0.grid(True, which='both')
            ax1.set_title('Phase B CTs: rlc_Ib = u_Ib + pv_Ib')
            ax1.plot(df2['Time'], df2[u'RLC Passive Load I B'])
            ax1.plot(df2['Time'], df2[u'Utility I B']+df2[u'pvIb'])
            # ax1.set_ylim([-50,50])
            ax1.grid(True, which='both')
            ax2.set_title('Phase C CTs: rlc_Ic = u_Ic + pv_Ic')
            ax2.plot(df2['Time'], df2[u'RLC Passive Load I C'])
            ax2.plot(df2['Time'], df2[u'Utility I C']+df2[u'pvIc'])
            # ax2.set_ylim([-50,50])
            ax2.grid(True, which='both')
            ax3.set_title('Phase A CTs: u_Ia = rlc_Ia - pv_Ia, b2Ia')
            ax3.plot(df2['Time'], df2[u'Utility I A'])
            ax3.plot(df2['Time'], df2[u'RLC Passive Load I A']-df2[u'pvIa'])
            ax3.plot(df2['Time'], df2[u'B2 LC1 I A'])
            # ax3.set_ylim([-25,25])
            ax3.grid(True, which='both')
            ax4.set_title('Phase B CTs: u_Ib = rlc_Ib - pv_Ib, b2Ib')
            ax4.plot(df2['Time'], df2[u'Utility I B'])
            ax4.plot(df2['Time'], df2[u'RLC Passive Load I B']-df2[u'pvIb'])
            ax4.plot(df2['Time'], df2[u'B2 LC1 I B'])
            # ax4.set_ylim([-25,25])
            ax4.grid(True, which='both')
            ax5.set_title('Phase C CTs: u_Ic = rlc_Ic - pv_Ic, b2Ic')
            ax5.plot(df2['Time'], df2[u'Utility I C'])
            ax5.plot(df2['Time'], df2[u'RLC Passive Load I C']-df2[u'pvIc'])
            ax5.plot(df2['Time'], df2[u'B2 LC1 I C'])
            # ax5.set_ylim([-25,25])
            ax5.grid(True, which='both')
            pltPdf.savefig() # Saves fig to pdf
            plt.close() # Closes fig to clean up memory
            fig, (ax0,ax1,ax2,ax3,ax4,ax5) = plt.subplots(nrows=6, ncols=1, figsize=(8.5,11), sharex=True)
            fig.suptitle(fname) # This titles the figure
            ax0.set_title('Phase A CTs: rlc_Ia = u_Ia + pv_Ia')
            ax0.plot(df2['Time'], df2[u'RLC Passive Load I A'])
            ax0.plot(df2['Time'], df2[u'Utility I A']+df2[u'pvIa'])
            # ax0.set_ylim([-50,50])
            ax0.grid(True, which='both')
            ax1.set_title('Phase B CTs: rlc_Ib = u_Ib + pv_Ib')
            ax1.plot(df2['Time'], df2[u'RLC Passive Load I B'])
            ax1.plot(df2['Time'], df2[u'Utility I B']+df2[u'pvIb'])
            # ax1.set_ylim([-50,50])
            ax1.grid(True, which='both')
            ax2.set_title('Phase C CTs: rlc_Ic = u_Ic + pv_Ic')
            ax2.plot(df2['Time'], df2[u'RLC Passive Load I C'])
            ax2.plot(df2['Time'], df2[u'Utility I C']+df2[u'pvIc'])
            # ax2.set_ylim([-50,50])
            ax2.grid(True, which='both')
            ax3.set_title('Phase A CTs: pv_Ia = rlc_Ia - u_Ia, b1Ia, b2Ia')
            ax3.plot(df2['Time'], df2[u'RLC Passive Load I A']-df2[u'Utility I A'])
            ax3.plot(df2['Time'], df2[u'pvIa'])
            ax3.plot(df2['Time'], df2[u'b1Ia'])
            ax3.plot(df2['Time'], df2[u'B2 LC1 I A'])
            # ax3.set_ylim([-50,50])
            ax3.grid(True, which='both')
            ax4.set_title('Phase B CTs: pv_Ib = rlc_Ib - u_Ib, b1Ib, b2Ib')
            ax4.plot(df2['Time'], df2[u'RLC Passive Load I B']-df2[u'Utility I B'])
            ax4.plot(df2['Time'], df2[u'pvIb'])
            ax4.plot(df2['Time'], df2[u'b1Ib'])
            ax4.plot(df2['Time'], df2[u'B2 LC1 I B'])
            # ax4.set_ylim([-50,50])
            ax4.grid(True, which='both')
            ax5.set_title('Phase C CTs: pv_Ic = rlc_Ic - u_Ic, b1Ic, b2Ic')
            ax5.plot(df2['Time'], df2[u'RLC Passive Load I C']-df2[u'Utility I C'])
            ax5.plot(df2['Time'], df2[u'pvIc'])
            ax5.plot(df2['Time'], df2[u'b1Ic'])
            ax5.plot(df2['Time'], df2[u'B2 LC1 I C'])
            # ax5.set_ylim([-50,50])
            ax5.grid(True, which='both')
            pltPdf.savefig() # Saves fig to pdf
            plt.close() # Closes fig to clean up memory
        # Fig3:
        fig, (ax0,ax1,ax2,ax3,ax4) = plt.subplots(nrows=5, ncols=1, figsize=(8.5,11), sharex=True)
        fig.suptitle(fname) # This titles the figure
        # ax0.set_title('Utility Bus Vabc')
        # ax0.plot(df2['Time'], df2[u'Utility Bus V A'])
        # ax0.plot(df2['Time'], df2[u'Utility Bus V B'])
        # ax0.plot(df2['Time'], df2[u'Utility Bus V C'])
        # ax0.set_ylim([-500,500])
        # ax0.grid(True, which='both')
        ax0.set_title('Island Bus Vabc')
        ax0.plot(df2['Time'], df2[u'Island Bus V A'])
        ax0.plot(df2['Time'], df2[u'Island Bus V B'])
        ax0.plot(df2['Time'], df2[u'Island Bus V C'])
        ax0.plot(df2['Time'], df2[u'Island Vmag'])
        # ax0.set_ylim([-500,500])
        ax0.grid(True, which='both')
        ax1.set_title('Island Bus Frequency')
        ax1.plot(df2['Time'], df2[u'Island Wpll']/(2*pi))
        ax1.set_ylim([-60, 180])
        ax1.grid(True, which='both')
        ax2.set_title('Total Load Current Iabc')
        ax2.plot(df2['Time'], df2[u'RLC Passive Load I A']+df2[u'GE Load I A'])
        ax2.plot(df2['Time'], df2[u'RLC Passive Load I B']+df2[u'GE Load I B'])
        ax2.plot(df2['Time'], df2[u'RLC Passive Load I C']+df2[u'GE Load I C'])
        # ax2.set_ylim([-100,100])
        ax2.grid(True, which='both')
        ax3.set_title('B1+B2 Iabc')
        ax3.plot(df2['Time'], df2[u'pvIa'])
        ax3.plot(df2['Time'], df2[u'pvIb'])
        ax3.plot(df2['Time'], df2[u'pvIc'])
        # ax3.set_ylim([-100,100])
        ax3.grid(True, which='both')
        ax4.set_title('Utility Iabc')
        ax4.plot(df2['Time'], df2[u'Utility I A'])
        ax4.plot(df2['Time'], df2[u'Utility I B'])
        ax4.plot(df2['Time'], df2[u'Utility I C'])
        # ax4.set_ylim([-100,100])
        ax4.grid(True, which='both')
        pltPdf.savefig() # Saves fig to pdf
        plt.close() # Closes fig to clean up memory
        # Fig4:
        fig, (ax0,ax1,ax2,ax3,ax4) = plt.subplots(nrows=5, ncols=1, figsize=(8.5,11), sharex=True)
        fig.suptitle(fname) # This titles the figure
        ax0.set_title('P[kW]: Utility, Load, PV')
        ax0.plot(df2['Time'], df2[u'P Utility'])
        ax0.plot(df2['Time'], df2[u'P RLC']+df2[u'P AMP'])
        ax0.plot(df2['Time'], df2[u'P B1']+df2[u'P B2'])
        # ax0.set_ylim([-50,250])
        ax0.grid(True, which='both')
        ax1.set_title('Q[kVAr]: Utility, Load, PV')
        ax1.plot(df2['Time'], df2[u'Q Utility'])
        ax1.plot(df2['Time'], df2[u'Q RLC']+df2[u'Q AMP'])
        ax1.plot(df2['Time'], df2[u'Q B1']+df2[u'Q B2'])
        # ax1.set_ylim([-80,80])
        ax1.grid(True, which='both')
        ax2.set_title('Island Vpos, pu penetration')
        ax2.plot(df2['Time'], df2[u'Island Vpos']/BASE['Vln'])
        ax2.plot(df2['Time'], df2[u'B1+B2 pen'])
        ax2.set_ylim([0,1.5])
        ax2.grid(True, which='both')
        ax3.set_title('Island Vneg, Vzero')
        ax3.plot(df2['Time'], df2[u'Island Vneg']/BASE['Vln'])
        ax3.plot(df2['Time'], df2[u'Island Vzer']/BASE['Vln'])
        # ax3.set_ylim([0,0.25])
        ax3.grid(True, which='both')
        ax4.set_title('Island Vrms abc')
        ax4.plot(df2['Time'], df2[u'Island Varms']/BASE['Vln'])
        ax4.plot(df2['Time'], df2[u'Island Vbrms']/BASE['Vln'])
        ax4.plot(df2['Time'], df2[u'Island Vcrms']/BASE['Vln'])
        # ax4.set_ylim([0,1.25])
        ax4.grid(True, which='both')
        pltPdf.savefig() # Saves fig to pdf
        plt.close() # Closes fig to clean up memory
    print "Closing: Results.pdf"
    pltPdf.close() # Close the pdf file
    if CONFIG['WriteLimitedDataToExcel']: # Close excel file
        print "Writing: LimitedData.xlsx"
        writer.save() # file is saved here
    return
# Read the TDMS data rack_1 = TdmsFile(path_to_file_1) rack_2 = TdmsFile(path_to_file_2) root_object = rack_1.object() root_object_2 = rack_2.object() # Get groups (rack names) # automatize in the future group_names_1 = rack_1.groups() group_names_2 = rack_2.groups() print('List of groups of the rack_1 TDMS:', group_names_1[0]) print('List of groups of the rack_2 TDMS:', group_names_2[0]) # Get channel names of the group if len(group_names_1) == 1: channel_names_1 = rack_1.group_channels(group_names_1[0]) if len(group_names_2) == 1: channel_names_2 = rack_2.group_channels(group_names_2[0]) # Get the data from channels PT101 = rack_1.channel_data(group_names_1[0], "PT101") PT102 = rack_1.channel_data(group_names_1[0], "PT102") PT103 = rack_2.channel_data(group_names_2[0], "PT103") PT104 = rack_2.channel_data(group_names_2[0], "PT104") PT551 = rack_1.channel_data(group_names_1[0], "PT551") PT552 = rack_1.channel_data(group_names_1[0], "PT552") PT553 = rack_1.channel_data(group_names_1[0], "PT553") PT421 = rack_1.channel_data(group_names_1[0], "PT421") MT401S = rack_1.channel_data(group_names_1[0], "MT401S") MT401T = rack_1.channel_data(group_names_1[0], "MT401T") MT401J = rack_1.channel_data(group_names_1[0], "MT401J")
# --- Script fragment (Python 2): walk a TDMS file's hierarchy and pretty-
# print it via `display`/`display_properties` helpers defined elsewhere.
# `level` is the indentation depth passed to those helpers.

show_properties = False
show_data = False
show_time_track = False

level = 0
root = tdmsfile.object()
display('/', level)
if show_properties:
    display_properties(root, level)
for group in tdmsfile.groups():
    level = 1
    group_obj = tdmsfile.object(group)
    display("%s" % group_obj.path, level)
    if show_properties:
        display_properties(group_obj, level)
    for channel in tdmsfile.group_channels(group):
        level = 2
        display("%s" % channel.path, level)
        if show_properties:
            level = 3
            display("data type: %s" % channel.data_type.name, level)
            display_properties(channel, level)
        if show_time_track:
            level = 3
            try:
                # time_track() raises KeyError when the waveform
                # properties (wf_start_time etc.) are absent
                time_track = channel.time_track()
                print time_track
            except KeyError:
                print "no time track"
        if show_data:
            level = 3
            # NOTE(review): block continues past this chunk.