Пример #1
0
 def readTDMS(self):
     """Load the RTD temperature channels and time track from the TDMS file
     at self.filepath, trim all series to start where the commanded torque
     first reaches a threshold value, and convert times to matplotlib date
     numbers relative to a fixed epoch.

     Sets self.RTD1_list, self.RTD2_list, self.j and self.Relative_time.
     """
     tdms = TdmsFile(self.filepath)
     self.RTD1_list = tdms.object("Meta Data", "MA-RTD 1").data
     self.RTD2_list = tdms.object("Meta Data", "MA-RTD 2").data
     Time = tdms.object("Meta Data", "Time").data
     Torque_CMD = tdms.object("Meta Data", "MA_Command.Torque").data
     # Threshold = the torque command at sample 80 — presumably a point where
     # the command has settled; TODO confirm against the acquisition setup.
     t0 = Torque_CMD[80]
     # j becomes the index of the first sample reaching the threshold.
     j = 0
     for t in Torque_CMD:
         if t >= t0:
             break
         else:
             j += 1
     else:
         # for/else: loop finished without a break (threshold never reached),
         # so keep the full series from index 0.
         j = 0
     self.j = j
     # Trim everything before the threshold index.
     self.RTD1_list = self.RTD1_list[j:]
     self.RTD2_list = self.RTD2_list[j:]
     Time = Time[j:]
     # Times need a full year/month/day; default the epoch to 2019-01-01.
     zero = datetime(2019, 1, 1)
     zero = mdates.date2num(zero)
     T0 = mdates.date2num(Time[0])
     Time = mdates.date2num(Time)
     # Shift so the first retained sample lands on the fixed epoch.
     self.Relative_time = [t - T0 + zero for t in Time]
Пример #2
0
def tdmsfuncapr14(filename):
    """Open a TDMS file and extract the four filtered channels used in the
    April 14 tests.

    Returns (mean1, mean2, mean3, mean4, c1, c2, c3, c4) where each cN is the
    channel's samples reshaped into an (N, 1) column vector and each meanN is
    that channel's mean.
    """
    tdms_file = TdmsFile(filename)

    # Channels live in the 'Untitled' group, one per filter frequency.
    channel_names = ('1khz (Filtered)', '10khz (Filtered)',
                     '40khz (Filtered)', '100khz (Filtered)')

    columns = []
    for name in channel_names:
        raw = tdms_file.object('Untitled', name).data
        # Reshape the flat sample array into an (N, 1) column vector.
        columns.append(np.reshape(raw, (len(raw), 1)))
    c1, c2, c3, c4 = columns

    mean1, mean2, mean3, mean4 = (np.mean(col) for col in columns)

    return mean1, mean2, mean3, mean4, c1, c2, c3, c4
def load_tdms(pdata_filepath, smooth_SD=25):
    '''Open labview file and extract data, smooth fluorescence signals with gaussian filter
    of standard deviation smooth_SD ms.

    Parameters:
    pdata_filepath: path to the photometry .tdms file.
    smooth_SD: gaussian smoothing SD in ms; falsy (0/None) disables smoothing.

    Returns a dict with the time vector, GCaMP/RFP signals, TTL rise times,
    acquisition start time and sampling rate.
    '''
    tdms_file = TdmsFile(pdata_filepath)
    # time_track() is in seconds; convert to milliseconds.
    time = 1000 * tdms_file.object('Untitled', 'GCAMP').time_track()
    gcamp = tdms_file.object('Untitled', 'GCAMP').data
    rfp = tdms_file.object('Untitled', 'RFP').data
    TTL = tdms_file.object('Untitled', 'pyControl').data
    start_time = tdms_file.object('Untitled', 'Time').data[0]
    # Rising-edge detection on the TTL line (threshold 1.5 V): a sample is an
    # edge when it is high and its predecessor was low.
    TTL_high = TTL > 1.5
    TTL_times = time[np.where(TTL_high[1:] & ~TTL_high[:-1])[0] + 1]
    # Sampling rate (Hz) from the median inter-sample interval (ms).
    fs = 1000 / np.median(time[1:] - time[:-1])
    if smooth_SD:
        # smooth_SD is in ms; convert to samples for the filter kernel.
        gcamp = gaussian_filter1d(gcamp, smooth_SD * fs / 1000.)
        rfp = gaussian_filter1d(rfp, smooth_SD * fs / 1000.)

    return {
        'time_pho': time,  # Time since aquisition start (ms).
        'gcamp': gcamp,  # GCaMP signal (Volts).
        'rfp': rfp,  # RFP signal (Volts).
        'TTL_times': TTL_times,  # Times when TTL signal went high (ms).
        'start_time':
        start_time,  # Absolute time of first sample (datetime object).
        'fs': fs
    }  # Sampling rate (Hz)
Пример #4
0
    def test_can_write_complex(self):
        """Round-trip complex64 and complex128 channels through a TdmsWriter
        and check dtype and values survive."""
        input_complex64_data = np.array([1 + 2j, 3 + 4j], np.complex64)
        input_complex128_data = np.array([5 + 6j, 7 + 8j], np.complex128)

        output_file = BytesIO()
        with TdmsWriter(output_file) as tdms_writer:
            tdms_writer.write_segment(
                [ChannelObject("group", "complex64_data",
                               input_complex64_data)])
            tdms_writer.write_segment(
                [ChannelObject("group", "complex128_data",
                               input_complex128_data)])

        output_file.seek(0)
        tdms_file = TdmsFile(output_file)

        # Each channel must come back with its original dtype and samples.
        cases = (("complex64_data", np.complex64, input_complex64_data),
                 ("complex128_data", np.complex128, input_complex128_data))
        for name, dtype, expected in cases:
            output_data = tdms_file.object("group", name).data
            self.assertEqual(output_data.dtype, dtype)
            self.assertEqual(len(output_data), 2)
            self.assertEqual(output_data[0], expected[0])
            self.assertEqual(output_data[1], expected[1])
Пример #5
0
    def test_can_read_tdms_file_properties_after_writing(self):
        """Write root- and group-level properties and verify they can all be
        read back from the resulting TDMS file."""
        test_time = datetime.utcnow()
        if pytz:
            # Use a timezone-aware datetime when pytz is available.
            test_time = test_time.replace(tzinfo=pytz.utc)

        a_segment = RootObject(properties={
            "prop1": "foo",
            "prop2": 3,
        })
        b_segment = GroupObject("group_name", properties={
            "prop3": 1.2345,
            "prop4": test_time,
        })

        output_file = BytesIO()
        with TdmsWriter(output_file) as tdms_writer:
            tdms_writer.write_segment([a_segment, b_segment])

        # Rewind the in-memory stream so it can be re-read as a TDMS file.
        output_file.seek(0)
        tdms_file = TdmsFile(output_file)

        # object() with no arguments returns the file's root object.
        a_output = tdms_file.object()
        b_output = tdms_file.object("group_name")

        self.assertTrue("prop1" in a_output.properties, msg="prop1 not found")
        self.assertTrue("prop2" in a_output.properties, msg="prop2 not found")
        self.assertTrue("prop3" in b_output.properties, msg="prop3 not found")
        self.assertTrue("prop4" in b_output.properties, msg="prop4 not found")
        self.assertEqual(a_output.properties["prop1"], "foo")
        self.assertEqual(a_output.properties["prop2"], 3)
        self.assertEqual(b_output.properties["prop3"], 1.2345)
        self.assertEqual(b_output.properties["prop4"], test_time)
Пример #6
0
def ocop2df(filepath):
    """Load the normalized-data group of an OceanOptics TDMS log into a
    DataFrame indexed by wavelength, with a (MP position, time) MultiIndex
    on the columns.

    Returns an empty DataFrame (after printing a message) when no group whose
    name ends in '_Norm' is present.
    """
    file = TF(filepath)

    # Find the group name that the normalized data is in (first '*_Norm').
    normdata_groupname = None
    normdata_regex = "(.+_Norm)"
    for group in file.groups():
        m = re.search(normdata_regex, group)
        if m is not None:  # FIX: identity comparison instead of != None
            normdata_groupname = m.groups()[0]
            break

    if normdata_groupname is None:
        print('could not find Norm group in ' + filepath)
        return pd.DataFrame()

    df = file.object(normdata_groupname).as_dataframe()
    df.index = file.object('Global', "Wavelength").data
    indexarr = list(
        zip(
            file.object('Global', 'MP Pos').data,
            file.object('Global', 'Time').data
        ))
    # BUG FIX: the second tuple element is the acquisition time, not a
    # wavelength (wavelength is the row index), so the level names are
    # ['MP', 'Time'] rather than ['MP', 'Wavelength'].
    df.columns = pd.MultiIndex.from_tuples(indexarr, names=['MP', 'Time'])
    return df


# filepath = "C:\\Labview Test Data\\2018-11-20\\UnspecifiedProj\\Run3\\Log_NIRQuest512_0_Case5_seed_0.tdms"
# # df = ocop2df(filepath)
# file = TF(filepath)
Пример #7
0
    def test_can_read_tdms_file_properties_after_writing(self):
        """Root and group properties written to a file read back intact."""
        test_time = datetime.utcnow()
        if pytz:
            test_time = test_time.replace(tzinfo=pytz.utc)

        root_props = {"prop1": "foo", "prop2": 3}
        group_props = {"prop3": 1.2345, "prop4": test_time}
        a_segment = RootObject(properties=root_props)
        b_segment = GroupObject("group_name", properties=group_props)

        output_file = BytesIO()
        with TdmsWriter(output_file) as tdms_writer:
            tdms_writer.write_segment([a_segment, b_segment])

        output_file.seek(0)
        tdms_file = TdmsFile(output_file)

        a_output = tdms_file.object()
        b_output = tdms_file.object("group_name")

        # Every property written must be present and unchanged.
        for obj, expected in ((a_output, root_props), (b_output, group_props)):
            for key, value in expected.items():
                self.assertTrue(key in obj.properties,
                                msg="%s not found" % key)
                self.assertEqual(obj.properties[key], value)
Пример #8
0
def import_tdmsfile_to_tempodb(file_path, series_key_base=None):
    """Parse a TDMS file and import each channel into TempoDB, logging the
    file structure (groups, channels, properties) as it goes.

    file_path: path of the TDMS file to import.
    series_key_base: optional base for generated series keys; when omitted,
    each channel's full path is used as its series key.
    """
    tdmsfile = TdmsFile(file_path)

    # Logging options
    show_properties = True
    show_data = False
    show_time = False
    import_data = True

    count = 0
    level = 0
    root = tdmsfile.object()
    display('/', level)
    if show_properties:
        display_properties(root, level)
    for group in tdmsfile.groups():
        level = 1
        group_obj = tdmsfile.object(group)
        display("%s" % group_obj.path, level)
        if show_properties:
            display_properties(group_obj, level)
        for channel in tdmsfile.group_channels(group):
            level = 2
            display("%s" % channel.path, level)
            if show_properties:
                level = 3
                display("data type: %s" % channel.data_type.name, level)
                display_properties(channel, level)

            if show_data:
                level = 3
                data = channel.data
                display("data: %s" % data, level)

            if show_time:
                level = 3
                time = channel.time_track()
                display("time: %s" % time, level)

            if import_data:
                level = 3
                try:
                    if series_key_base:
                        # Generated key: "<base>-<running channel count>",
                        # e.g. "Paul-Python-TDMS-1".
                        series_key = "%s-%i" % (series_key_base, count)
                        count += 1
                    else:
                        # Fall back to the full channel path as the key.
                        series_key = channel.path

                    import_channel_to_tempodb(channel, series_key)

                except KeyError as ke:
                    display("There is no embedded time data in this channel.",
                            level)
                    # FIX: Python 3 print function (was a Python 2 print
                    # statement, a syntax error under Python 3).
                    print(ke)
            print()
        print()
Пример #9
0
def import_tdmsfile_to_tempodb(file_path, series_key_base=None):
    """Parse a TDMS file and import each channel into TempoDB, logging the
    file structure (groups, channels, properties) as it goes.

    file_path: path of the TDMS file to import.
    series_key_base: optional base for generated series keys; when omitted,
    each channel's full path is used as its series key.
    """
    tdmsfile = TdmsFile(file_path)

    # Logging options
    show_properties = True
    show_data = False
    show_time = False
    import_data = True

    count = 0
    level = 0
    root = tdmsfile.object()
    display('/', level)
    if show_properties:
        display_properties(root, level)
    for group in tdmsfile.groups():
        level = 1
        group_obj = tdmsfile.object(group)
        display("%s" % group_obj.path, level)
        if show_properties:
            display_properties(group_obj, level)
        for channel in tdmsfile.group_channels(group):
            level = 2
            display("%s" % channel.path, level)
            if show_properties:
                level = 3
                display("data type: %s" % channel.data_type.name, level)
                display_properties(channel, level)

            if show_data:
                level = 3
                data = channel.data
                display("data: %s" % data, level)

            if show_time:
                level = 3
                time = channel.time_track()
                display("time: %s" % time, level)

            if import_data:
                level = 3
                try:
                    if series_key_base:
                        # Generated key: "<base>-<running channel count>",
                        # e.g. "Paul-Python-TDMS-1".
                        series_key = "%s-%i" % (series_key_base, count)
                        count += 1
                    else:
                        # Fall back to the full channel path as the key.
                        series_key = channel.path

                    import_channel_to_tempodb(channel, series_key)

                except KeyError as ke:
                    display("There is no embedded time data in this channel.", level)
                    # FIX: Python 3 print function (was a Python 2 print
                    # statement, a syntax error under Python 3).
                    print(ke)
            print()
        print()
Пример #10
0
    def convert_to_df(self, debug=False):
        '''Convert the content of the .tdms files under self.path into a
        DataFrame of per-channel traces.

        Each non-Reference channel is paired with the Reference group's
        'Ramp_Output' channel to form a two-column (x, y) DataFrame.

        Input parameters:
        debug: a boolean. If True, debug information about the available
        groups and channels is printed.

        Return Value:
        1. A pandas dataframe with columns
           'data', 'groupName', 'channelName' and 'filename'.
        2. the number of channels as integer.'''

        frames = []

        data_files = [x for x in os.listdir(self.path) if x.endswith(".tdms")]

        for filename in data_files:
            tdms_file = TdmsFile(self.path + '/' + filename)

            if debug:
                print("The following Groups and Channels are available:")
                for group in tdms_file.groups():
                    print(group)
                    # FIX: list each group's channels inside the group loop;
                    # previously the channel loop was dedented and only the
                    # last group's channels were printed.
                    for channel in tdms_file.group_channels(group):
                        print(channel)

            # Shared x-axis for every channel in this file.
            s1 = pd.Series(tdms_file.object('Reference', 'Ramp_Output').data)

            for group in tdms_file.groups():
                if str(group) != 'Reference':
                    for channel in tdms_file.group_channels(group):
                        channelName = TDMSConverter.get_channel_name(
                            self, channel)
                        if debug:
                            print(">>>", str(group), '--', channelName)
                        s2 = pd.Series(
                            tdms_file.object(str(group), channelName).data)
                        df_data = pd.concat([s1, s2], axis=1)
                        df_data.columns = ['x', 'y']

                        frames.append(pd.DataFrame({
                            "data": [df_data],
                            "groupName": [str(group)],
                            "channelName": [channelName],
                            # FIX: include the path separator, consistent with
                            # the TdmsFile() call above (was self.path+filename).
                            "filename": [self.path + '/' + filename]
                        }))

        # FIX: DataFrame.append was removed in pandas 2.0; build the result
        # with a single concat instead of repeated appends.
        df = pd.concat(frames) if frames else pd.DataFrame()
        return df, df.shape[0]
Пример #11
0
def read_tdms(file_name):
    """Read the three voltage (U1..U3) and current (I1..I3) channels from the
    'data' group of a TDMS file.

    Returns a dict keyed 'V1'..'V3' and 'I1'..'I3'.
    """
    tdms_file = TdmsFile(file_name)

    def channel_data(name):
        return tdms_file.object('data', name).data

    readings = {}
    for idx in (1, 2, 3):
        # Voltage channels are stored as 'U<n>' but exposed as 'V<n>'.
        readings['V{}'.format(idx)] = channel_data('U{}'.format(idx))
        readings['I{}'.format(idx)] = channel_data('I{}'.format(idx))
    return readings
Пример #12
0
def ocop2df(filepath):
    """Load the third TDMS group of an OceanOptics log as a DataFrame indexed
    by wavelength, with a (MP position, time) MultiIndex on the columns."""
    file = TF(filepath)
    df = file.object(file.groups()[2]).as_dataframe()
    df.index = file.object('Global', "Wavelength").data
    indexarr = list(
        zip(
            file.object('Global', 'MP Pos').data,
            file.object('Global', 'Time').data
        ))
    # BUG FIX: the second tuple element is the acquisition time, not a
    # wavelength (wavelength is the row index), so the level names are
    # ['MP', 'Time'] rather than ['MP', 'Wavelength'].
    df.columns = pd.MultiIndex.from_tuples(indexarr, names=['MP', 'Time'])
    return df
Пример #13
0
def LoadGradData(filenum='0000'):
    """Load the four magnetometer channels from a CommonCalib TDMS file.

    filenum: zero-padded file number string appended to 'CommonCalib'.
    Returns [Data1, Data2, Data3, Data4, time] where time comes from
    channel 1's waveform time track.
    """
    fname = GetCompEnvGrad() + 'CommonCalib' + str(filenum) + '.tdms'

    tdms_file = TdmsFile(fname)
    # Channels Ch1..Ch4 live in the 'MagCalib' group.
    channels = [tdms_file.object('MagCalib', 'Ch{}'.format(n))
                for n in (1, 2, 3, 4)]
    data = [ch.data for ch in channels]

    time = channels[0].time_track()

    return data + [time]
Пример #14
0
    def test_can_write_timestamp_data(self):
        """A list of datetime values written to a channel reads back equal."""
        # Use a timezone-aware datetime when pytz is available.
        tzinfo = None
        if pytz:
            tzinfo = pytz.utc
        input_data = [
            datetime(2017, 7, 9, 12, 35, 0, 0, tzinfo),
            datetime(2017, 7, 9, 12, 36, 0, 0, tzinfo),
            datetime(2017, 7, 9, 12, 37, 0, 0, tzinfo),
            ]

        segment = ChannelObject("group", "timedata", input_data)

        output_file = BytesIO()
        with TdmsWriter(output_file) as tdms_writer:
            tdms_writer.write_segment([segment])

        # Rewind the in-memory stream so it can be re-read as a TDMS file.
        output_file.seek(0)
        tdms_file = TdmsFile(output_file)

        output_data = tdms_file.object("group", "timedata").data

        self.assertEqual(len(output_data), 3)
        self.assertEqual(output_data[0], input_data[0])
        self.assertEqual(output_data[1], input_data[1])
        self.assertEqual(output_data[2], input_data[2])
Пример #15
0
    def test_can_write_numpy_timestamp_data_with_dates(self):
        """numpy datetime64 dates written to a channel read back as
        midnight datetime values."""
        # Use a timezone-aware expectation when pytz is available.
        tzinfo = None
        if pytz:
            tzinfo = pytz.utc
        input_data = np.array([
            '2017-07-09',
            '2017-07-09',
            '2017-07-09'], dtype='datetime64')

        segment = ChannelObject("group", "timedata", input_data)

        output_file = BytesIO()
        with TdmsWriter(output_file) as tdms_writer:
            tdms_writer.write_segment([segment])

        # Rewind the in-memory stream so it can be re-read as a TDMS file.
        output_file.seek(0)
        tdms_file = TdmsFile(output_file)

        output_data = tdms_file.object("group", "timedata").data

        # Each date should come back as midnight on that date.
        self.assertEqual(len(output_data), 3)
        self.assertEqual(
            output_data[0], datetime(2017, 7, 9, 0, 0, 0, 0, tzinfo))
        self.assertEqual(
            output_data[1], datetime(2017, 7, 9, 0, 0, 0, 0, tzinfo))
        self.assertEqual(
            output_data[2], datetime(2017, 7, 9, 0, 0, 0, 0, tzinfo))
Пример #16
0
    def test_can_append_to_file_using_path(self):
        """Writing a second segment in append mode extends the channel."""
        input_1 = np.linspace(0.0, 1.0, 10)
        input_2 = np.linspace(1.0, 2.0, 10)

        tempdir = tempfile.mkdtemp()
        temppath = "%s/test_file.tdms" % tempdir
        try:
            # Write the first segment, then reopen the file in append mode.
            with TdmsWriter(temppath) as tdms_writer:
                tdms_writer.write_segment(
                    [ChannelObject("group", "a", input_1)])
            with TdmsWriter(temppath, 'a') as tdms_writer:
                tdms_writer.write_segment(
                    [ChannelObject("group", "a", input_2)])

            output = TdmsFile(temppath).object("group", "a").data

            # Both segments' samples are concatenated in write order.
            self.assertEqual(len(output), 20)
            np.testing.assert_almost_equal(
                output, np.concatenate([input_1, input_2]))
        finally:
            # Remove the temp file/dir even when an assertion fails.
            if os.path.exists(temppath):
                os.remove(temppath)
            os.rmdir(tempdir)
def readFiles():
    """Walk every .tdms file under the module-level `path`, compute signal
    features for channel "Canale 4" of each file, and pickle the accumulated
    results.

    NOTE(review): relies on module-level names defined elsewhere in this
    file: path, resFiles, resFFTSpectrum, resPeakSpectrum, resMean,
    resStdDeviation, resSlope, evaluateFeature, cleanFilename, saveResults.
    """
    files = os.listdir(path)
    # Sort so results are produced in a deterministic filename order.
    files.sort()

    for filename in files:
        if filename.endswith(".tdms"):
            #print(filename)
            tdms_file = TdmsFile(path + "/" + filename)
            channel = tdms_file.object("Untitled", "Canale 4")
            data = channel.data
            fftSpectrum, peakSpectrum, mean, stdDeviation, slope = evaluateFeature(
                data)
            # Accumulate one entry per file in each module-level result list.
            resFiles.append(cleanFilename(filename))
            resFFTSpectrum.append(fftSpectrum)
            resPeakSpectrum.append(peakSpectrum)
            resMean.append(mean)
            resStdDeviation.append(stdDeviation)
            resSlope.append(slope)
            #mean, mean1 = avg(data)
            #print("Filename - " + filename + " Mean: " + str(mean))
    # Persist each accumulated feature list as a pickle file.
    saveResults(resFiles, "featureObtained/Date.p")
    saveResults(resFFTSpectrum, "featureObtained/FFTSpectrum.p")
    saveResults(resPeakSpectrum, "featureObtained/PeakSpectrum.p")
    saveResults(resMean, "featureObtained/Mean.p")
    saveResults(resStdDeviation, "featureObtained/StdDeviation.p")
    saveResults(resSlope, "featureObtained/Slope.p")
Пример #18
0
def readTDMS(path, acqNum, channelName='PXI1Slot7/ai0', tdms_file=None):
    """Read one acquisition's channel data from a TDMS file.

    Parameters
    ----------
    path : str
        TDMS file path (ignored when tdms_file is provided).
    acqNum : int or str
        Acquisition number; the TDMS group name is str(acqNum).
    channelName : str
        Channel within the acquisition group.
    tdms_file : TdmsFile, optional
        An already-open file to reuse instead of re-parsing `path`.

    Returns the channel samples as a float64 numpy array.
    """
    if tdms_file is None:  # FIX: identity comparison instead of == None
        # Load the file only when the caller didn't supply one.
        tdms_file = TdmsFile(path)

    # Extract the requested channel from the acquisition's group.
    channel = tdms_file.object(str(acqNum), channelName)

    # FIX: np.float was removed in NumPy 1.24; use the concrete float64
    # dtype instead.
    return channel.data.astype(np.float64)
Пример #19
0
def get_video_metadata(videotdms, metadatatdms):
    """
    Gets metadata about the video to be converted. These include fps, width
    and height of frame and total number of frames in video.

    :param videotdms: path to video .tdms
    :param metadatdms: path to metadata .tdms
    :returns: a dictionary with the metadata and an integer with number of frames to convert
    :raises ValueError: if there's a mismatch between expected and reported number of frames
    """
    print(" extracting metadata from: ", metadatatdms)

    # The metadata file's root-object properties hold fps, width, etc.
    metadata_object = TdmsFile(metadatatdms).object()
    props = dict(metadata_object.properties)

    # Size probe retained from the original frame-count sanity check
    # (also verifies the video file exists).
    videosize = os.path.getsize(videotdms)

    return props
Пример #20
0
def tdms2csvTimingMetaData(tdmsFolderName):
    """For each .tdms file in originTDMSFolder/tdmsFolderName, write the
    waveform start time and sampling increment to a timingMetaData.csv inside
    a numbered data_N directory under destinationCSVFolder.

    NOTE(review): relies on module-level names defined elsewhere in this
    file: originTDMSFolder, destinationCSVFolder and channelNames.
    """
    tdmsFiles=[]
    tdmsFolderNameFull=originTDMSFolder+'/'+tdmsFolderName;
    
    # Nothing to do when the source folder does not exist.
    if not os.path.exists(tdmsFolderNameFull):
        return
    
    for file in os.listdir(tdmsFolderNameFull):
        if file.endswith(".tdms"):
              tdmsFiles+= [file]
    #%% Check if the destination foldername exists
    destinationFolderName=destinationCSVFolder+'/'+tdmsFolderName;
    
    if not os.path.exists(destinationFolderName):
        os.makedirs(destinationFolderName)    
             
    #%%
    # Each TDMS file gets its own numbered output directory: data_1, data_2, ...
    directoryIndex=1;
    for file in tdmsFiles: 
        print(file)         
        tdms_file = TdmsFile(tdmsFolderNameFull+'/'+file)
        
        directory=destinationCSVFolder+'/'+tdmsFolderName+'/data_'+str(directoryIndex);
        directoryIndex+=1;
        
        if not os.path.exists(directory):
            os.makedirs(directory) 
        
        for channelName in channelNames:
            channel = tdms_file.object('data',channelName)
            # Waveform timing metadata stored as standard TDMS properties.
            startTime=channel.property('wf_start_time')
            samplingIncrement=channel.property('wf_increment')
            # NOTE(review): the output path is identical for every channel, so
            # each iteration overwrites the previous file and only the last
            # channel's metadata survives — confirm this is intended.
            np.savetxt(directory+'/timingMetaData.csv', [startTime.hour,startTime.minute,startTime.second,startTime.microsecond, samplingIncrement], delimiter=',')
Пример #21
0
    def test_can_write_timestamp_data(self):
        """datetime samples written to a channel read back unchanged."""
        tzinfo = pytz.utc if pytz else None
        # Three samples one minute apart: 12:35, 12:36, 12:37.
        input_data = [
            datetime(2017, 7, 9, 12, 35 + offset, 0, 0, tzinfo)
            for offset in range(3)
        ]

        segment = ChannelObject("group", "timedata", input_data)

        buffer = BytesIO()
        with TdmsWriter(buffer) as tdms_writer:
            tdms_writer.write_segment([segment])
        buffer.seek(0)

        output_data = TdmsFile(buffer).object("group", "timedata").data

        self.assertEqual(len(output_data), 3)
        for written, read_back in zip(input_data, output_data):
            self.assertEqual(read_back, written)
Пример #22
0
 def Read_Tdms(self):
     """Read every channel named in self.listforname from the group
     self.group of the TDMS file self.filename.

     Returns a {channel name: channel data} dict.
     """
     tdmsfile = TdmsFile(self.filename)
     return {
         name: tdmsfile.object(self.group, name).data
         for name in self.listforname
     }
Пример #23
0
    def test_can_write_timestamp_data_with_datetimes(self):
        """Naive datetimes round-trip as numpy datetime64 values."""
        input_data = [datetime(2017, 7, 9, 12, m, 0) for m in (35, 36, 37)]
        expected_data = np.array(
            ['2017-07-09T12:35:00', '2017-07-09T12:36:00',
             '2017-07-09T12:37:00'], dtype='datetime64')

        output_file = BytesIO()
        with TdmsWriter(output_file) as tdms_writer:
            tdms_writer.write_segment(
                [ChannelObject("group", "timedata", input_data)])
        output_file.seek(0)

        output_data = TdmsFile(output_file).object("group", "timedata").data

        self.assertEqual(len(output_data), 3)
        for actual, expected in zip(output_data, expected_data):
            self.assertEqual(actual, expected)
Пример #24
0
    def test_can_write_numpy_timestamp_data_with_dates(self):
        """datetime64 dates written to a channel read back as midnight
        datetime values."""
        tzinfo = pytz.utc if pytz else None
        input_data = np.array(['2017-07-09'] * 3, dtype='datetime64')

        output_file = BytesIO()
        with TdmsWriter(output_file) as tdms_writer:
            tdms_writer.write_segment(
                [ChannelObject("group", "timedata", input_data)])
        output_file.seek(0)

        output_data = TdmsFile(output_file).object("group", "timedata").data

        self.assertEqual(len(output_data), 3)
        # A bare date reads back as midnight on that date.
        expected = datetime(2017, 7, 9, 0, 0, 0, 0, tzinfo)
        for value in output_data:
            self.assertEqual(value, expected)
Пример #25
0
    def test_can_append_to_file_using_path(self):
        """Writing a second segment in append mode ('a') extends the
        existing channel data rather than replacing it."""
        input_1 = np.linspace(0.0, 1.0, 10)
        input_2 = np.linspace(1.0, 2.0, 10)
        segment_1 = ChannelObject("group", "a", input_1)
        segment_2 = ChannelObject("group", "a", input_2)

        tempdir = tempfile.mkdtemp()
        temppath = "%s/test_file.tdms" % tempdir
        try:
            with TdmsWriter(temppath) as tdms_writer:
                tdms_writer.write_segment([segment_1])
            # Reopen the same file in append mode for the second segment.
            with TdmsWriter(temppath, 'a') as tdms_writer:
                tdms_writer.write_segment([segment_2])

            tdms_file = TdmsFile(temppath)

            output = tdms_file.object("group", "a").data

            # Both segments' samples are concatenated in write order.
            self.assertEqual(len(output), 20)
            np.testing.assert_almost_equal(
                output, np.concatenate([input_1, input_2]))

        finally:
            # Remove the temp file/dir even when an assertion fails.
            if os.path.exists(temppath):
                os.remove(temppath)
            os.rmdir(tempdir)
Пример #26
0
def LoadGradData4(date='2016.01.01',filenum='0000'):
	'''loads 4-channel data from TMDS file'''
	fpath=GetCompEnvGrad();
	
	fname=fpath+date+'\\CommonCalib'+str(filenum)+'.tdms'
	
	tdms_file=TdmsFile(fname)
	Ch1 = tdms_file.object('MagCalib','Ch1'); Data1 = Ch1.data;
	Ch2 = tdms_file.object('MagCalib','Ch2'); Data2 = Ch2.data;
	Ch3 = tdms_file.object('MagCalib','Ch3'); Data3 = Ch3.data;
	Ch4 = tdms_file.object('MagCalib','Ch4'); Data4 = Ch4.data;
	ChRef = tdms_file.object('MagCalib','ChRef'); DataRef= ChRef.data;
	
	time=Ch1.time_track()
	
	return [DataRef,Data1,Data2,Data3,Data4,time] #such that the reference is index 0, Channel 1 is index 1, Channel 2 is index 2, etc
Пример #27
0
def read_tdms(file_name):
    """This function reads in a color plot tdms file from Demon Voltammetry
    export options and returns the color plot as a 2-D Numpy Array with
    background subtracted. The background is the first CV.

    X-axis of array is time, Y-axis is an individual command voltage recording.

    Keyword Arg is the tdms file that you wish to analyze.
    """
    # Access file as object
    tdms_file = TdmsFile(file_name)
    root_object = tdms_file.object()

    # Number of sweeps = collection duration (s) * collection frequency (Hz).
    # FIX: removed a stray bare `num_collections` expression (a no-op).
    num_collections = (root_object.property("Collection Duration (s)") *
                       root_object.property("Collection Frequency (Hz)"))

    # Populate the color plot with every sweep; channels are named '0', '1', ...
    color_plot_lists = []
    for i in range(int(num_collections)):
        color_plot_lists.append(tdms_file.object('Data1', '%s' % i).data)

    # Transpose so rows = samples within a sweep and columns = time/sweeps.
    color_plot_array = np.array(color_plot_lists).transpose()

    # Background subtraction: remove the first CV from every column.
    # (Vectorized; the [:, 0:1] slice keeps 2-D shape for broadcasting.)
    color_plot_array = color_plot_array - color_plot_array[:, 0:1]

    # Apply a 4th-order zero-phase Butterworth filter along each sweep
    # (axis=0 is equivalent to the previous per-column loop).
    b, a = signal.butter(4, 0.03, analog=False)
    final_color_plot_array = signal.filtfilt(b, a, color_plot_array, axis=0)

    return final_color_plot_array
Пример #28
0
def read_tdms(fn):
    """Read a TDMS recording into a dict of {group: [channel arrays]} plus
    'dt' (sampling interval), 'yunits' (y-axis units) and 'holding' metadata.

    dt is taken from an explicit "time" group when present; otherwise it is
    derived from a 'Sampling Rate' or 'Sampling Rate(AI)' root property
    (Hz -> ms), falling back to defaults when absent or non-positive.
    """
    tdms_file = TdmsFile(fn)
    root_props = tdms_file.object().properties

    try:
        # Prefer an explicit time channel: dt = mean sample spacing.
        times = np.array(
            [[channel.data
              for channel in tdms_file.group_channels(group)
              if channel.data is not None]
             for group in tdms_file.groups()
             if group.lower() == "time"][0][0])
        dt = np.mean(np.diff(times))
    except IndexError:
        # No time group: derive dt from a sampling-rate root property.
        # FIX: flattened the original doubly-negated nested ifs; the lookup
        # priority ('Sampling Rate' before 'Sampling Rate(AI)') is unchanged.
        sr = None
        for key in ("Sampling Rate", "Sampling Rate(AI)"):
            if key in root_props:
                sr = float(root_props[key])
                break
        if sr is None:
            dt = 1.0
        elif sr > 0:
            dt = 1e3 / sr
        else:
            dt = 1.0 / 25.0

    yunits = root_props['Units']
    try:
        meta = tdms_file.group_channels('Meta')
    except Exception:
        # FIX: no bare except — still best-effort, but doesn't swallow
        # KeyboardInterrupt/SystemExit.
        meta = ''

    recording = {group: [
        channel.data
        for channel in tdms_file.group_channels(group)
        if channel.data is not None]
                 for group in tdms_file.groups()}
    recording["dt"] = dt
    recording["yunits"] = yunits
    recording["holding"] = meta

    return recording
    def plot(self):
        """Load every channel of t1.tdms and draw the first seven channels,
        one per canvas (self.canvas1 .. self.canvas7)."""
        tdms_file = TdmsFile("t1.tdms")

        # Collect channel data; channels are walked in reverse within each
        # group, matching the original display order.
        tdms_groups = tdms_file.groups()
        data_array = []
        for grp in tdms_groups:
            for ch in reversed(tdms_file.group_channels(grp)):
                # The channel repr contains the quoted group and channel
                # names; split on quotes to recover them.
                temp = str(ch).split('\'')
                temp_obj = tdms_file.object(temp[1], temp[3])
                data_array.append(temp_obj.data)
        data_array = np.asarray(data_array)

        # FIX: the seven copy-pasted subplot blocks are collapsed into one
        # loop; behavior per canvas is unchanged.
        for idx in range(7):
            ax = self.figure.add_subplot()
            ax.clear()
            ax.plot(data_array[idx])
            getattr(self, "canvas%d" % (idx + 1)).draw()
Пример #30
0
def main():
    """Test driver: classify punch trends from a TDMS recording and feed
    the results back into the training set stored on disk."""
    # =========Step 1: read in data (TDMS example)============
    tdms_file = TdmsFile(
        "/media/sherry/新加卷/ubuntu/WZL-2018/Feintool Daten/FW-1-1/new material/AKF_SS-FW2-H04521-H05000.tdms"
    )
    frame = tdms_file.object('Untitled').as_dataframe()
    # Sample a fixed time window from the force / stroke channels.
    force = frame['Stempel_1 (Formula Result)'][80800:99000].reset_index(drop=True)
    stroke = frame['Position_Ma'][80800:99000].reset_index(drop=True)

    # Previously stored training segmentations.
    segmentations = read_from_file()

    # =========Step 2: extract all punches (the hub)===========
    seg_hub = npF.SegHub()
    punches = seg_hub.extract_hub(force, stroke).reset_index(drop=True)

    # =========Step 3: segment the first punch into trends=========
    punch_seg = seg_hub.segment_and_plot(punches[0].dropna(), 'l2')
    refined = seg_hub.segment_and_plot(punch_seg[4].dropna(), 'rbf', 0, 4)
    # Replace trend 4 with its finer four-way split (trends 4..7).
    for offset in range(4):
        punch_seg[4 + offset] = refined[offset]

    # =========Step 4: classification of each of the 8 trends=========
    for i in range(8):
        print("Trend:" + str(i + 1))
        sample = seg_hub.Uniformation(punch_seg[i])
        clusters = pd.read_csv("cluster_" + str(i) + ".csv")
        data_train = seg_hub.Uniformation(segmentations[i])
        row, col = data_train.shape
        col = min(len(sample), col)
        print("Result:.........")
        sample = sample[0:col]
        test = pd.DataFrame([sample, sample])
        data_train = data_train.iloc[:, 0:col]
        # Append the new sample to a *copy* of the training table:
        # plain assignment would alias the original DataFrame.
        extended = data_train.copy()
        extended.loc[row] = sample.values
        linkage_matrix = hac.linkage(extended, 'ward')
        result = seg_hub.print_clusters(data_train, linkage_matrix, 3, plot=False)
        # Regenerate the cluster file including the new sample.
        pd.DataFrame(result).to_csv("cluster_" + str(i) + ".csv", index=False)

        seg_hub.classifier(data_train, clusters, test, 3)

    # ==========Step 5: persist the newly segmented punch==========
    save_newdata(punch_seg)
 def loadTDMSImages(self, file):
     """Load an image stack from a TDMS file.

     Frame geometry (dimx, dimy), binning, frame count and exposure are
     read from the TDMS root-object properties and cached on ``self``;
     the pixel data itself lives in the 'Image'/'Image' channel.

     Returns the image data reshaped to (frames, dimx, dimy).
     """
     tdms_file = TdmsFile(file)
     # Root-object properties hold the acquisition metadata.
     p = tdms_file.object().properties
     self.dimx = int(p['dimx'])
     self.dimy = int(p['dimy'])
     self.binning = int(p['binning'])
     self.frames = int(p['dimz'])
     # Exposure is stored with a decimal comma (European locale).
     self.exposure = float(p['exposure'].replace(',', '.'))
     images = tdms_file.channel_data('Image', 'Image')
     return images.reshape(self.frames, self.dimx, self.dimy)
Пример #32
0
def getlaserdata():
    """Return the normalised laser profile as a pandas Series.

    Reads a one-off TDMS layout: the 'Raw' group supplies the time base
    ('Time1') and the 'Average' group the averaged intensity ('Mean').
    The intensity is scaled to a maximum of 1 and the time axis shifted
    by a fixed trigger offset.
    """
    paths = _get_pathnames(
        "C:\\Users\\aspit\\OneDrive\\Data\\LaserProfile")
    tdms = TF(paths['Test1_20Hz.tdms'])

    time_axis = tdms.object('Raw').as_dataframe()['Time1']
    mean_signal = tdms.object('Average').as_dataframe()['Mean']

    normalised = mean_signal / mean_signal.max()
    # Shift the time base by the empirical trigger offset (was 870 once).
    shifted_time = time_axis - 35

    return pd.Series(normalised.values, index=shifted_time)
Пример #33
0
    def test_can_write_tdms_objects_read_from_file(self):
        """Objects read back from a file can themselves be written again."""
        expected = np.linspace(0.0, 1.0, 10)
        group_in = GroupObject("group", properties={"prop1": "bar"})
        channel_in = ChannelObject(
            "group", "a", expected,
            properties={"prop1": "foo", "prop2": 3})

        tempdir = tempfile.mkdtemp()
        temppath = "%s/test_file.tdms" % tempdir
        try:
            # First pass: write the freshly constructed objects.
            with TdmsWriter(temppath) as writer:
                writer.write_segment([group_in, channel_in])

            reread = TdmsFile(temppath)
            group_out = reread.object("group")
            channel_out = reread.object("group", "a")

            # Second pass: write the objects we just read back.
            with TdmsWriter(temppath) as writer:
                writer.write_segment([group_out, channel_out])

            reread = TdmsFile(temppath)
            group_out = reread.object("group")
            channel_out = reread.object("group", "a")

            self.assertFalse(group_out.has_data)
            self.assertEqual(group_out.properties["prop1"], "bar")

            self.assertEqual(len(channel_out.data), 10)
            np.testing.assert_almost_equal(channel_out.data, expected)
            self.assertEqual(channel_out.properties["prop1"], "foo")
            self.assertEqual(channel_out.properties["prop2"], 3)

        finally:
            if os.path.exists(temppath):
                os.remove(temppath)
            os.rmdir(tempdir)
Пример #34
0
    def read(self):
        """Read the trap TDMS file named by the module-level ``fname``.

        Populates sampling info (dt, fs, N), channel metadata and the
        calibrated QPD/PZT/MTA traces on ``self``, then (re)creates a
        results directory named after the file.
        """
        # File information below
        tdms_file = TdmsFile(fname + '.tdms')  # Reads a tdms file.
        root_object = tdms_file.object()  # tdms file information

        for name, value in root_object.properties.items():
            print("{0}: {1}".format(name, value))

        group_name = "Trap"  # Get the group name
        channels = tdms_file.group_channels(
            group_name)  # Get the channel object
        self.channel_num = len(channels)  # Channel number
        self.channel_name = [
            str(channels[i].channel) for i in range(len(channels))
        ]  # Channel name
        self.dt = channels[0].properties[u'wf_increment']  # Sampling time
        self.fs = int(1.0 / self.dt)  # Sampling frequency
        self.N = len(channels[0].time_track())

        print("Channel number: %d" % self.channel_num)
        print("Channel name: %s" % self.channel_name)
        print("Sampling rate: %d Hz" % self.fs)
        print("Data size: %d sec \n" % int(self.N * self.dt))

        # Read data
        print("Reading raw data ... \n")
        self.t = channels[0].time_track()
        # Channels 0-1: QPD x/y, mean-subtracted, converted V -> nm via
        # the module-level QPD_nm2V calibration factors.
        self.QPDx = (channels[0].data -
                     np.mean(channels[0].data)) * QPD_nm2V[0]
        self.QPDy = (channels[1].data -
                     np.mean(channels[1].data)) * QPD_nm2V[1]
        # Channel 2: QPD sum signal, kept raw.
        self.QPDs = channels[2].data
        # Channels 3-5: piezo stage x/y/z, mean-subtracted, V -> nm.
        self.PZTx = (channels[3].data -
                     np.mean(channels[3].data)) * PZT_nm2V[0]
        self.PZTy = (channels[4].data -
                     np.mean(channels[4].data)) * PZT_nm2V[1]
        self.PZTz = (channels[5].data -
                     np.mean(channels[5].data)) * PZT_nm2V[2]
        # Channels 6-7: MTA x/y, referenced to their first sample.
        self.MTAx = (channels[6].data - channels[6].data[0]) * MTA_nm2V[0]
        self.MTAy = (channels[7].data - channels[7].data[0]) * MTA_nm2V[1]
        # Forces from QPD deflection via trap stiffness (pN per nm).
        self.Fx = self.QPDx * stiffness_pN2nm[0]
        self.Fy = self.QPDy * stiffness_pN2nm[1]

        # Make a directory to save the results
        self.path_data = os.getcwd()
        self.path_save = os.path.join(self.path_data, fname)

        # Start from a clean results directory on every run.
        if os.path.exists(self.path_save):
            shutil.rmtree(self.path_save)
            os.makedirs(self.path_save)
        else:
            os.makedirs(self.path_save)
Пример #35
0
    def test_can_write_tdms_objects_read_from_file(self):
        group_segment = GroupObject("group", properties={
            "prop1": "bar"
        })
        input_data = np.linspace(0.0, 1.0, 10)
        channel_segment = ChannelObject("group", "a", input_data, properties={
            "prop1": "foo",
            "prop2": 3,
        })

        tempdir = tempfile.mkdtemp()
        temppath = "%s/test_file.tdms" % tempdir
        try:
            with TdmsWriter(temppath) as tdms_writer:
                tdms_writer.write_segment([group_segment, channel_segment])

            tdms_file = TdmsFile(temppath)
            read_group = tdms_file.object("group")
            read_channel = tdms_file.object("group", "a")

            with TdmsWriter(temppath) as tdms_writer:
                tdms_writer.write_segment([read_group, read_channel])

            tdms_file = TdmsFile(temppath)
            read_group = tdms_file.object("group")
            read_channel = tdms_file.object("group", "a")

            self.assertFalse(read_group.has_data)
            self.assertEqual(read_group.properties["prop1"], "bar")

            self.assertEqual(len(read_channel.data), 10)
            np.testing.assert_almost_equal(read_channel.data, input_data)
            self.assertEqual(read_channel.properties["prop1"], "foo")
            self.assertEqual(read_channel.properties["prop2"], 3)

        finally:
            if os.path.exists(temppath):
                os.remove(temppath)
            os.rmdir(tempdir)
Пример #36
0
def import_shot(fname, params):
    """Load the requested channels of one shot from a TDMS file.

    Each entry of ``params`` is a ``(name, gain)`` tuple; the channel is
    read from TDMS group 'p' and divided by its gain.  The name 'B' is
    special-cased to the 'Field_fixed' channel.

    NOTE(review): the original docstring claimed NaNs are removed on
    import (because B / B-dot may be sampled at a lower frequency), but
    no NaN filtering happens in this function — confirm whether callers
    rely on that.

    Arguments:
        fname: path to TDMS data
        params: list of (variable name, gain) tuples to import

    Returns:
        dict mapping each parameter name to its scaled data array, plus
        the original ``params`` list under the key 'params'.
    """
    shot = dict()
    shot['params'] = params

    tdms_file = TdmsFile(fname)
    for p, g in params:
        if p == 'B':
            # Magnetic field lives under a fixed channel name.
            shot[p] = tdms_file.object('p', 'Field_fixed').data / g
        else:
            shot[p] = tdms_file.object('p', p).data / g
    return shot
Пример #37
0
    def load_trace(mname):
        """Loads the traces and returns them as a dictionary

        Currently, only loading traces from tdms files is supported.
        This forces us to load the full tdms file into memory which
        takes some time.
        """
        trace = {}
        tname = TraceColumn.find_trace_file(mname)

        # Traces are only available for tdms-based measurements.
        if tname is not None and tname.suffix == ".tdms":
            # The measurement file carries the per-event boundaries ...
            meas = TdmsFile(str(mname))
            sampleids = meas.object("Cell Track", "FL1index").data

            # ... and the (usually larger) trace file the raw samples.
            trace_file = TdmsFile(str(tname))
            for trace_key in dfn.FLUOR_TRACES:
                group, ch = naming.tr_data_map[trace_key]
                try:
                    raw = trace_file.object(group, ch).data
                except KeyError:
                    continue
                if raw is None or raw.size == 0:
                    # No actual data for this channel; skip it.
                    continue
                # np.split only needs the section boundaries, so the
                # leading index (0) is dropped.
                trace[trace_key] = np.split(raw, sampleids[1:])
        return trace
Пример #38
0
        def _load(self, filename=None, *args, **kargs):
            """TDMS file loader routine.

            Args:
                filename (string or bool): File to load. If None then the existing filename is used,
                    if False, then a file dialog will be used.

            Returns:
                A copy of the itself after loading the data.

            Raises:
                Core.StonerLoadError: if the file cannot be parsed as TDMS.
            """
            if not filename:
                # Covers both None and False: fall back to a file dialog.
                self.get_filename("r")
            else:
                self.filename = filename
            # Open the file and read the main file header and unpack into a dict
            try:
                f = TdmsFile(self.filename)

                column_headers = []
                data = np.array([])

                for grp in f.objects.keys():
                    if grp == "/":
                        pass  # skip the root group
                    elif grp == "/'TDI Format 1.5'":
                        # Root metadata group: copy its properties in.
                        metadata = f.object("TDI Format 1.5")
                        for k, v in metadata.properties.items():
                            self.metadata[k] = self.metadata.string_to_type(str(v))
                    else:
                        if f.objects[grp].has_data:
                            chnl = grp.split("/")[-1]
                            # BUG FIX: str.strip returns a new string; the
                            # original discarded the result, leaving quotes
                            # and whitespace in the column header.
                            chnl = chnl.strip().strip("'")
                            column_headers.append(chnl)
                            if data.size == 0:
                                data = f.objects[grp].data
                            else:
                                data = np.column_stack([data, f.objects[grp].data])
                self.data = data
                self.column_headers = column_headers
            except Exception:
                from traceback import format_exc

                raise Core.StonerLoadError("Not a TDMS File \n{}".format(format_exc()))

            return self
Пример #39
0
def convert_tdms(fileName,tempo,env):
    """Convert ``<fileName>.tdms`` in ``env.path`` into an HDF5 file.

    Each TDMS group becomes an HDF5 group carrying ``Nchannel``,
    ``Tstart`` and ``sampling`` attributes; every channel is written as
    a gzip-compressed dataset.  Finally the file is registered with
    ``env.process``.  NOTE: legacy Python 2 code (print statement below).
    """
    # Optionally wait for the acquisition to finish writing the file.
    if tempo:
        time.sleep(20)
    path=env.path
    tdms_file=TdmsFile(os.path.join(path,fileName+'.tdms'))
   # tdms_file=TdmsFile(r'D:\DATA\00838_Data.tdms')
    hdf5=h5py.File(path+os.sep+fileName+'.h5','w')
    #channel=tdms_file.object('PXI M6251','Lang_U')
    #group=tdms_file.object('PXI M6251')
    grouplist=tdms_file.groups()
    #print grouplist
    for i in grouplist:
        group=tdms_file.object(i)
        grouph=hdf5.create_group(i)
        print group.path
        # Per-instrument timing metadata: each known group stores its
        # parameters differently.
        if group.path=='/\'PXI M6251\'':
            nbchannels=group.properties['Nchannel']
            tstart=group.properties['Tstart']
            sampling=group.properties['SampleTime']
        if group.path=='/\'Tektronix\'':
            tstart=group.properties['Tstart']
            #sampling=group.properties['SampleTime']
            # Fixed scope rate: 1.25 GS/s.
            sampling=1/1.25e9
            nbchannels=group.properties['Nchannel']
        if group.path=='/\'S7\'':
            nbchannels=group.properties['Nchannel']
            tstart=0.
            sampling=1.
        #print nbchannels,tstart,sampling
        # NOTE(review): if a group matches none of the cases above, these
        # attributes reuse values from a previous iteration (or raise
        # NameError on the first pass) — confirm all groups are covered.
        grouph.attrs['Nchannel']=nbchannels
        grouph.attrs['Tstart']=tstart
        grouph.attrs['sampling']=1/float(sampling)
        liste=tdms_file.group_channels(i)

        for j in liste:
            # Strip quotes from the TDMS path to get a valid HDF5 name.
            grouph.create_dataset(re.sub('[\']','',j.path),data=j.data,compression="gzip")

#    conn=sqlite3.connect('ishtar')
#    curs=conn.cursor()
#    curs.execute('insert into shots values(?,?,?,?,?)',(int(fileName[0:-5]),fileName,0.,0.,0.))
#    conn.commit()
    hdf5.create_group('Process')
    hdf5.close()
    env.process.addFile(fileName)
Пример #40
0
    def _init_data_with_tdms(self, tdms_filename):
        """Initializes the current RT-DC dataset with a tdms file.

        Populates ``self._events`` from the 'Cell Track' table, builds
        the configuration from the matching _para.ini/_camera.ini files
        and initialises the filters.
        """
        tdms_file = TdmsFile(str(tdms_filename))
        # time is always there
        table = "Cell Track"
        # Edit naming.dclab2tdms to add features
        for arg in naming.tdms2dclab:
            try:
                data = tdms_file.object(table, arg).data
            except KeyError:
                # Channel not present in this file; simply skip it.
                pass
            else:
                if data is None or len(data) == 0:
                    # Ignore empty features. npTDMS treats empty
                    # features in the following way:
                    # - in nptdms 0.8.2, `data` is `None`
                    # - in nptdms 0.9.0, `data` is an array of length 0
                    continue
                self._events[naming.tdms2dclab[arg]] = data

        # Set up configuration
        tdms_config = Configuration(
            files=[self.path.with_name(self._mid + "_para.ini"),
                   self.path.with_name(self._mid + "_camera.ini")],
        )
        dclab_config = Configuration()
        # Translate tdms-style configuration into dclab sections/keys,
        # applying the per-key type converters from dfn.config_funcs.
        for section in naming.configmap:
            for pname in naming.configmap[section]:
                meta = naming.configmap[section][pname]
                typ = dfn.config_funcs[section][pname]
                if isinstance(meta, tuple):
                    # Tuple: (tdms section, tdms option) to look up.
                    osec, opar = meta
                    if osec in tdms_config and opar in tdms_config[osec]:
                        val = tdms_config[osec].pop(opar)
                        dclab_config[section][pname] = typ(val)
                else:
                    # Scalar: fixed default value.
                    dclab_config[section][pname] = typ(meta)

        self.config = dclab_config
        self._complete_config_tdms(tdms_config)

        self._init_filters()
Пример #41
0
    def test_can_write_string_data(self):
        """String channels (including non-ASCII) round-trip in memory."""
        input_data = [
            "hello world",
            u"\u3053\u3093\u306b\u3061\u306f\u4e16\u754c"]

        stream = BytesIO()
        with TdmsWriter(stream) as writer:
            writer.write_segment(
                [ChannelObject("group", "string_data", input_data)])

        stream.seek(0)
        result = TdmsFile(stream).object("group", "string_data").data

        self.assertEqual(len(result), 2)
        for got, expected in zip(result, input_data):
            self.assertEqual(got, expected)
Пример #42
0
    def parameterSearch(self,TDMSPath):
        """Read every .tdms file in ``TDMSPath`` and load the sweep data
        into the ``InputData`` class.

        Frequency sweep limits come from ``config.txt``; from each file a
        window of ``Num Points`` samples is taken, starting ``Jump Back``
        points before the signal maximum (or at sample 0 when the maximum
        occurs too early in the record).
        """
        config = ConfigParser.ConfigParser()
        config.read(u'config.txt')

        #This section reads the needed information from the config file
        startFreq = config.getint(u'Frequencies',u'Start Frequency')
        stopFreq = config.getint(u'Frequencies',u'Stop Frequency')
        stepFreq = config.getint(u'Frequencies',u'Step Frequency')
        NUM_POINTS = config.getint(u'Symbolic Constants',u'Num Points')
        JUMP_BACK = config.getint(u'Symbolic Constants',u'Jump Back')

        TDMS_Time = []
        TDMS_Data = []
        # FIX: the original loop variable shadowed the builtin `file`.
        fnames = [f for f in os.listdir(TDMSPath) if f.endswith(u'.tdms')]
        for fname in fnames: #Loop through all TDMS files in order to get the data within each
            path = TDMSPath + u'/' + fname
            TDMS = TdmsFile(path) #Function that reads the specific TDMS file
            group = TDMS.groups()[0]
            channel = TDMS.object(group, u'Dev1/ai0') #returns a channel type
            data = channel.data
            time = channel.time_track()

            #Determining starting point to read file:
            #if the peak is more than JUMP_BACK points in, start JUMP_BACK
            #points before it; otherwise start at the beginning of the file.
            peak = numpy.argmax(data)  # hoisted: previously computed twice
            start = peak - JUMP_BACK if peak > JUMP_BACK else 0

            t = time[start:start+NUM_POINTS]  #time information from the start point to the end point
            s = data[start:start+NUM_POINTS] #data from the start point to the end point

            TDMS_Time.append(t) #add the TDMS files data to the set of all TDMS files data
            TDMS_Data.append(s)

        #Now that the data has all been found it can be set using the InputData class

        InputData.Set_Start_Freq(startFreq)
        InputData.Set_Stop_Freq(stopFreq)
        InputData.Set_Step_Freq(stepFreq)
        InputData.Set_TDMS_Time(TDMS_Time)
        InputData.Set_TDMS_Data(TDMS_Data)
Пример #43
0
    def test_can_write_multiple_segments(self):
        """Data from successive segments of one channel is concatenated."""
        first = np.linspace(0.0, 1.0, 10)
        second = np.linspace(2.0, 3.0, 10)

        stream = BytesIO()
        with TdmsWriter(stream) as writer:
            writer.write_segment([ChannelObject("group", "a", first)])
            writer.write_segment([ChannelObject("group", "a", second)])

        stream.seek(0)
        read_back = TdmsFile(stream).object("group", "a").data

        expected = np.append(first, second)
        self.assertEqual(len(read_back), len(expected))
        self.assertTrue((read_back == expected).all())
Пример #44
0
    def test_can_read_tdms_file_after_writing(self):
        """Two channels written in one segment read back unchanged."""
        inputs = {
            "a": np.linspace(0.0, 1.0, 100),
            "b": np.linspace(0.0, 100.0, 100),
        }

        stream = BytesIO()
        with TdmsWriter(stream) as writer:
            writer.write_segment(
                [ChannelObject("group", name, data)
                 for name, data in inputs.items()])

        stream.seek(0)
        tdms_file = TdmsFile(stream)

        for name, data in inputs.items():
            read_back = tdms_file.object("group", name).data
            self.assertEqual(len(read_back), len(data))
            self.assertTrue((read_back == data).all())
Пример #45
0
def lade_tdms(par, typ):
    """Load up to ``par.messpunkte`` TDMS files of the given type.

    Files named ``<typ><row>.tdms`` in ``par.verzeichnis`` are read in
    numeric row order.  If fewer files exist than measurement points,
    the last row is duplicated to fill the gap.

    :type par: Parameter.Parameter
    :type typ: str
    :return: (list of channel-data arrays, list of file names)
    """
    daten = []
    namen = []

    # Sort matching files numerically by the row number that follows the
    # type name in the file name.
    def _zeilennummer(pfad):
        return int(pfad.split(os.sep)[-1].split(typ)[1].split('.')[0])

    sorted_fnames = sorted(
        glob(os.path.join(par.verzeichnis, typ + '*.tdms')),
        key=_zeilennummer
    )

    # No TDMS files at all: apparently the wrong directory.
    if not sorted_fnames:
        raise Fehler(mw_tdms[lang])

    verbleibend = par.messpunkte
    for tdms_fname in sorted_fnames:
        if verbleibend == 0:
            break
        verbleibend -= 1
        # First name is the group, second the channel name.
        channel = TdmsFile(tdms_fname).object('Untitled', 'Untitled')
        daten.append(np.array(channel.data))
        namen.append(tdms_fname.split(os.sep)[-1])

    # Measurement data missing: replicate the last row to fill up.
    if verbleibend > 0:
        print("Fehlende Messdaten (" + typ + ")")
        while verbleibend > 0:
            verbleibend -= 1
            daten.append(daten[-1])

    return daten, namen
Пример #46
0
# Poll loop (legacy Python 2): watch for newly written SpectralOut_NNN.tdms /
# PowerOut_NNN.tdms pairs, load 60 spectra per file and classify them with
# the pre-trained `model`.
# NOTE(review): `number`, `model`, `timestamp` and `dataType` are defined
# outside this excerpt; the zero-padded file-name logic assumes number < 1000.
while(1):
	if(number<10):
		randomFile = "SpectralOut_00"+str(number)+".tdms"
		powerFile = "PowerOut_00"+str(number)+".tdms"
	elif(number<100):
		randomFile = "SpectralOut_0"+str(number)+".tdms"
		powerFile = "PowerOut_0"+str(number)+".tdms"
	elif(number<1000):
		randomFile = "SpectralOut_"+str(number)+".tdms"
		powerFile = "PowerOut_"+str(number)+".tdms"
	print randomFile
	if(os.path.exists(randomFile)):
		print "inside " + randomFile
		random_tdms = TdmsFile('Spectral/' + randomFile)
		power_tdms = TdmsFile('Power/' + powerFile)
		# Channel "Untitled" is the first spectrum; "Untitled 1".."Untitled 59"
		# are stacked below it to form the feature matrix.
		b = random_tdms.object(random_tdms.groups()[0], "Untitled")
		c = power_tdms.object(power_tdms.groups()[0],"Untitled")
		x_set=np.array([b.data])
		power_set = np.array([c.data])
		# print(power_set[0])
		for x in range(1, 60):
		    b = random_tdms.object(random_tdms.groups()[0],"Untitled "+str(x))
		    np1=np.array([b.data])
		    x_set = np.concatenate((x_set,np1),axis=0)

		
		predicted=model.predict(x_set)
		# probability = (model.predict_proba(x_set).max(1))
		# for i in range(1,len(probability)):
		# 	if (probability[i] < 0.8):
		# 		predicted[i] = 1
	#build group string
	currentGroupString = timestamp + " - " + dataType + " - " + "All Data"



#what i have now is the group and channel name
#now we call the data and time from tdms file
# Collect every voltage and current channel's samples into `datas`.
# NOTE(review): voltageChannelNames, currentChannelNames, the two open
# TdmsFile objects and the group strings are defined earlier in the file,
# outside this excerpt.
datas = []
times = []

#setup tdms file
#from nptdms import TdmsFile
#just get the data from each channel
for channelName in voltageChannelNames:
	channel = tdmsVoltageFile.object(voltageGroupString,channelName)
	data = channel.data
	datas.append(data)
	# `times` is overwritten on every pass; only the last channel's time
	# track is kept (presumably all channels share one time base — verify).
	times = channel.time_track()

for channelName in currentChannelNames:
	channel = tdmsCurrentFile.object(currentGroupString,channelName)
	data = channel.data
	datas.append(data)



#put all channel names into one list
channelNames = voltageChannelNames + currentChannelNames

#print channelNames
Пример #48
0
# data_file = 'Tdm_Example_File.tdx'  # This one doesn't work -- not sure why

# Calculate full path for file
file_path = os.path.join(data_dir, data_file)

# Open the TDMS file once; the tree walk below only reads metadata.
tdmsfile = TdmsFile(file_path)
# channel = tdmsfile.object('Group', 'Channel1')
# data = channel.data
# time = channel.time_track()

# Toggles for how much detail to print per node.
show_properties = False
show_data = False
show_time_track = False

# Walk the TDMS hierarchy (root -> groups -> channels) and display each
# node's path at an increasing indent level.
# NOTE(review): `display`, `display_properties`, `data_dir` and
# `data_file` are defined earlier in the file, outside this excerpt.
level = 0
root = tdmsfile.object()
display('/', level)
if show_properties:
    display_properties(root, level)
for group in tdmsfile.groups():
    level = 1
    group_obj = tdmsfile.object(group)
    display("%s" % group_obj.path, level)
    if show_properties:
        display_properties(group_obj, level)
    for channel in tdmsfile.group_channels(group):
        level = 2
        display("%s" % channel.path, level)
        if show_properties:
            level = 3
            display("data type: %s" % channel.data_type.name, level)
Пример #49
0
# Build training data for appliance classification from TDMS captures.
# Each "Untitled N" channel holds one captured waveform; every 5th
# channel is taken as a training example.
import pandas as pandas
# lampTrain1 = np.genfromtxt('/Users/Chryssia/Desktop/SchoolStuff/Gatech/ECE4011/SeniorDesign/Scikit/MachineLearningData/Lamp/LampAverage_1.csv',delimiter=' ',dtype=None)

# Item ID
# 1. No load
# 2. Lamp
# 3. Hair Drier
# 4. Small Blender
# 5. Big Blender
# 6. Hand Mixer
# 7. TV Box
# 8. Hair Drier and Lamp

# Lamp Training
lamp_tdms = TdmsFile("Lamp_001.tdms")
a = lamp_tdms.object("Untitled","Untitled 5")

# Seed the training matrix with the first lamp capture (label '2').
x_train=np.array([a.data])
y_train = np.array(['2'])
for x in range(1, 10):
    a = lamp_tdms.object("Untitled","Untitled "+str(x*5))

    np1=np.array([a.data])
    x_train = np.concatenate((x_train,np1),axis=0)
    y_train = np.concatenate((y_train,[2]),axis=0)

# Hair Drier Training
# NOTE(review): this loop only reads the channel; the append to the
# training set appears to be truncated in this excerpt.
hairDrier_tdms = TdmsFile("HairDyer_001.tdms")
for x in range(1, 11):
    a = hairDrier_tdms.object("Untitled","Untitled "+str(x*5))
  
def MergeSavePlotTDMS( mypath,
               VMAG = {'delta':0.1, 'low':0.1},
               NCY = {'pre':3, 'post': 5},
               BASE = {'Vln':277.128} ):
    """ Merges TDMS files, crops time to island formation to cessation, saves in excel

    Input: Directory with the test result files, e.g.: "aLabView\\20150306"

    Parameters:
        mypath - directory containing the .tdms capture files to process
        VMAG   - voltage-magnitude thresholds (p.u.) used to detect island
                 formation/collapse.  NOTE(review): overwritten unconditionally
                 a few lines below, so the passed-in value is ignored.
        NCY    - cycles of margin to keep before islanding / after collapse.
                 NOTE(review): also overwritten below; passed-in value ignored.
        BASE   - per-unit bases; 'Vln' is the line-to-neutral voltage base.

    MergeTDMS maintains several Excel files in the target directory:
        MergeSummary.xlsx
        SignalsInfo.xlsx
        CroppedData.xlsx

    NOTE(review): Python 2 code (statement-form `print`).  Several names used
    below (sqrt, zeros, ones, mod, pi, where, NaN, subplots_adjust) are not
    imported in this function — presumably a module-level star import
    (e.g. pylab) provides them; verify at module top.
    NOTE(review): mutable dict default arguments are shared across calls;
    harmless here only because they are reassigned/ unused.
    """
#   import pdb # debugger
    import datetime
    import pandas as pd # multidimensional data analysis
#   import xlsxwriter
#   import numpy as np

    # Matplotlib ===
    # import matplotlib
    # matplotlib.use('Agg')
    import matplotlib.pyplot as plt
    # from matplotlib.backends.backend_pdf import PdfPages
    # Wei's advice ===
    import matplotlib.backends.backend_pdf as dpdf

    from os import listdir
    from os.path import isfile, join

    from nptdms import TdmsFile
    # from numpy import array
    from numpy import cos, sin, arctan2
    # NOTE(review): rolling_mean was removed from pandas in 0.23 — this code
    # requires an old pandas (< 0.23).
    from pandas import concat, ExcelWriter, rolling_mean

    CONFIG = {'WriteAllDataToExcel':False, # Will take forever ~5min for 27GB
              'WriteLimitedDataToExcel':False, # Only data from island creation to cessation
              'WriteSummaryToExcel':True, # Summary from TDMS files
              'ValidateCTs':False, # Plot pages that validate CT scaling and orientation
              'PlotFullRange': False} # Add a page with full time range of data

    # BASE = {'Vln':480/sqrt(3)} # Voltage base

    # B2 LC1 CT group was reversed during calibration
    B2LC1SIGN = -1.0 #  
    
    
    # LC1 B1 CT was reversed on 20150311, restored, then reversed again during PG&E CT calibration
    B1LC1SIGN = -1.0 # reversed CT. Use +1 for correct polarity 
    # Limiting plot range of acquired signals
    # Islanding detection works by comparing: 'Island Contactor status' > icsLvl # abs(uVmag-iVmag)>delta
    # Collapse detection works by comparing: iVmag<low
    # NOTE(review): these reassignments clobber the VMAG/NCY function arguments.
    VMAG = {'icsLvl': 3, 'delta':0.1, 'low':0.1} # island contactor status Level, Signal magnitudes in p.u. to limit plot range
    NCY = {'pre':3, 'post': 5} # Number of cycles to show pre-islanding and post-collapse

    # mypath = 'aLabView\\20150311' # Now a function parameter
    myfiles = [f for f in listdir(mypath) if isfile(join(mypath,f)) ]
    # filtering for .tdms extension
    tdmsfiles_list = [f for f in myfiles if f.endswith(".tdms")]

    # empty dictionaries
    ch_data   = {} # interim results
    sig_info  = {} # fname -> DataFrame 
    sig_data  = {} # fname -> DataFrame time,signal,...,signal
    file_info = {} # fname -> DataFrame Comment

    fiComment = [] # list to collect all file comments
    fiProgRev = [] # ditto but program version

    # Cycling through files to figure out how many are concatenated
    for fname in tdmsfiles_list:
        tdms_file = TdmsFile(mypath + "\\" + fname)
        # fetching comments
        tdms_root = tdms_file.object()
        fiComment = fiComment + [tdms_root.property(u'Comment')]
        fiProgRev = fiProgRev + [tdms_root.property(u'Program Revision')]

        # groups_list = tdms_file.groups() # There is only one group 'Analog'
        channels_list = tdms_file.group_channels(u'Analog') # u for unicode.
        # pdb.set_trace() # Debugger stop if desired
        # ch_names = [ch.property(u'NI_ChannelName') for ch in channels_list]
        # Per-channel metadata: name, scaling, start/end times, sample increment.
        ch_names = [ch.path.split("/")[2].strip("' ") for ch in channels_list]
        ch_slope = [ch.property(u'NI_Scale[0]_Linear_Slope') for ch in channels_list]
        ch_icept = [ch.property(u'NI_Scale[0]_Linear_Y_Intercept') for ch in channels_list]
        ch_tstrt = [ch.property(u'wf_start_time') for ch in channels_list]
        ch_tincr = [ch.property(u'wf_increment') for ch in channels_list]
        ch_tsamp = [ch.property(u'wf_samples') for ch in channels_list]
        ch_toffs = [ch.property(u'wf_start_offset') for ch in channels_list]
        ch_tend  = [ch.property(u'wf_start_time') +
                    datetime.timedelta(
                        seconds=ch.property(u'wf_increment')*
                                ch.property(u'wf_samples'))
                    for ch in channels_list]
        ch_scld  = [ch.property(u'NI_Scaling_Status')!=u'unscaled' for ch in channels_list]
        # pack all this into a dataframe
        sig_info[fname] = pd.DataFrame({
                'chName':   ch_names,
                'chScaled': ch_scld,
                'chScale':  ch_slope,
                'chIcept':  ch_icept,
                'chTstart': ch_tstrt,
                'chTend':   ch_tend,
                'chTincr':  ch_tincr},
                columns=['chName',
                         'chScaled',
                         'chScale',
                         'chIcept',
                         'chTstart',
                         'chTend',
                         'chTincr'])

        # NOTE(review): `ch` here is the leftover loop variable leaked from the
        # Python 2 list comprehensions above (the last channel) — works in
        # Python 2 only, and assumes all channels share one time track.
        ch_data['Time'] = ch.time_track()
        for ch in channels_list:
            # ch_data[ch.property(u'NI_ChannelName')] = ch.data*ch.property(u'NI_Scale[0]_Linear_Slope')
            ch_data[ch.path.split("/")[2].strip("' ")] = ch.data*ch.property(u'NI_Scale[0]_Linear_Slope')

        sig_data[fname] = pd.DataFrame(ch_data,columns=['Time']+ch_names)

    file_info = pd.DataFrame({
                'fiComment': fiComment,
                'fiProgRev': fiProgRev},
                columns=['fiComment',
                         'fiProgRev'],
                index=tdmsfiles_list)

    # Concatenating files that have a matching chTstart chTend
    # (a file whose start time equals the previous file's end time is a
    # continuation of the same capture and gets appended to it).
    keys = sorted(sig_info.keys())
    flast = keys[0]
    df1 = sig_info[flast]
    tStartLast = df1[df1.chName == u'Utility Bus V A'].chTstart.values[0]
    tEndLast   = df1[df1.chName == u'Utility Bus V A'].chTend.values[0]
    for fname in keys[1:]:
        df1 = sig_info[fname]
        tStart = df1[df1.chName == u'Utility Bus V A'].chTstart.values[0]
        tEnd   = df1[df1.chName == u'Utility Bus V A'].chTend.values[0]
        if(tEndLast == tStart):
            # merge files
            print tEndLast
            print tStart
            print fname + " continues " + flast
            sig_data[fname].Time += datetime.timedelta.total_seconds(tEndLast-tStartLast)
            sig_data[flast] = concat([sig_data[flast],sig_data[fname]],ignore_index=True)
            del sig_data[fname] # removes object from dictionary
            sig_info[flast].chTend = sig_info[fname].chTend
            del sig_info[fname]
            file_info = file_info.drop(fname)
            tEndLast = tEnd
        else:
            tStartLast = tStart
            tEndLast = tEnd
            flast = fname

    if CONFIG['WriteSummaryToExcel']:
        writer = ExcelWriter(mypath + '\\MergeSummary.xlsx')
        file_info.to_excel(writer,'file_info')
        if False: # error due to time zone awareness in LabView time stamps
            for fname in file_info.index.values.tolist():
                sig_info[fname].to_excel(writer,fname)
        writer.save()

    if CONFIG['WriteAllDataToExcel']: # This takes forever -- 5min for ~27GB
        writer = ExcelWriter(mypath + '\\AllData.xlsx')
        for fname in file_info.index.values.tolist():
            sig_data[fname].to_excel(writer,fname)
        writer.save()

    # Only the data from island formation to cessation
    if CONFIG['WriteLimitedDataToExcel']: # file is open here, but written from within the plot loop
        print "Opening: LimitedData.xlsx"
        writer = ExcelWriter(mypath + '\\LimitedData.xlsx')

    # Plotting results
    # Open pdf file
    print "Opening: Results.pdf"
    pltPdf = dpdf.PdfPages(mypath + '\\Results.pdf')
    # prepare a list of files to plot
    file_list = file_info.index.values.tolist();
    for fname in file_list:
    # for fname in [file_list[0]]:
        print "Processing: " + fname
        # Utility voltage magnitude: alpha beta -> mag
        # (Clarke transform of the three phase voltages, then vector magnitude)
        uVa=sig_data[fname][u'Utility Bus V A'].values
        uVb=sig_data[fname][u'Utility Bus V B'].values
        uVc=sig_data[fname][u'Utility Bus V C'].values
        uVal = uVa - 0.5 * (uVb + uVc)
        uVbe = sqrt(3.)/2. * (uVb - uVc)
        uVmag = 2./3.*sqrt(uVal*uVal+uVbe*uVbe)
        sig_data[fname][u'Utility Vmag'] = pd.Series(uVmag,index=sig_data[fname].index)

        # Island voltage magnitude: alpha beta -> mag
        iVa=sig_data[fname][u'Island Bus V A'].values
        iVb=sig_data[fname][u'Island Bus V B'].values
        iVc=sig_data[fname][u'Island Bus V C'].values
        iVal = iVa - 0.5 * (iVb + iVc)
        iVbe = sqrt(3.)/2. * (iVb - iVc)
        iVmag = 2./3.*sqrt(iVal*iVal+iVbe*iVbe)
        sig_data[fname][u'Island Val'] = pd.Series(iVal,index=sig_data[fname].index)
        sig_data[fname][u'Island Vbe'] = pd.Series(iVbe,index=sig_data[fname].index)
        sig_data[fname][u'Island Vmag'] = pd.Series(iVmag,index=sig_data[fname].index)

        # Island voltage frequency calculations using PLL. Must execute in a for loop, can't vectorize
        L_VlnIn = 480*sqrt(2.)/sqrt(3.)
        Pll_BW = 4.0*377
        GmPllWn = .725*Pll_BW
        GmPllPrpGn = Pll_BW/L_VlnIn
        GmPllIntGn = GmPllWn*GmPllWn/L_VlnIn
        GmPllWInt = 377.
        GmPllWIntMx =  2.5*GmPllWInt
        GmPllWIntMn = -0.5*GmPllWInt
        GmPllW = 377.
        L_DelTm1 = sig_info[fname].chTincr.values[0] # Taking the first channel's time increment
        GmAngElecFbk = -arctan2(iVbe[0],iVal[0])
        iVx   = zeros(iVa.shape) # setting output arrays to zero
        iVy   = zeros(iVa.shape)
        iWpll = ones(iVa.shape)*377.0

        # Sample-by-sample software PLL: advance angle, transform to x/y,
        # drive Vy to zero through a clamped PI regulator.
        for i in range(0,iVa.shape[0]):
            # calculate angle
            GmPllDelAng  = L_DelTm1*GmPllW;
            GmAngElecFbk = mod(GmAngElecFbk + GmPllDelAng, 2*pi)
            
            # Calculate voltage transform
            iVx[i] =  iVal[i]*cos(GmAngElecFbk) + iVbe[i]*sin(GmAngElecFbk)
            iVy[i] = -iVal[i]*sin(GmAngElecFbk) + iVbe[i]*cos(GmAngElecFbk)
            # calculate voltage error
            GmPllVyErr = -iVy[i]
            # Calculate integral term, clamp
            GmPllWInt = GmPllWInt + GmPllIntGn*L_DelTm1*GmPllVyErr
            if (GmPllWInt > GmPllWIntMx): 
                GmPllWInt = GmPllWIntMx
            if (GmPllWInt < GmPllWIntMn):
                GmPllWInt = GmPllWIntMn
            # Calculate PLL frequency, clamp
            GmPllW = GmPllWInt + GmPllVyErr*GmPllPrpGn;
            if (GmPllW > GmPllWIntMx): 
                GmPllW = GmPllWIntMx
            if (GmPllW < GmPllWIntMn):
                GmPllW = GmPllWIntMn
            iWpll[i] = GmPllWInt

        sig_data[fname][u'Island Vx']   = pd.Series(iVx,  index=sig_data[fname].index)
        sig_data[fname][u'Island Vy']   = pd.Series(iVy,  index=sig_data[fname].index)
        sig_data[fname][u'Island Wpll'] = pd.Series(iWpll,index=sig_data[fname].index)
            
        # Island voltage rms values using rolling_mean of squared signals
        iVa2 = iVa*iVa
        iVb2 = iVb*iVb
        iVc2 = iVc*iVc
        sig_data[fname][u'Island Va^2'] = pd.Series(iVa2,index=sig_data[fname].index)
        sig_data[fname][u'Island Vb^2'] = pd.Series(iVb2,index=sig_data[fname].index)
        sig_data[fname][u'Island Vc^2'] = pd.Series(iVc2,index=sig_data[fname].index)

        # One-cycle (1/60 s) rolling window expressed in samples.
        tinc = sig_info[fname]['chTincr'][sig_info[fname]['chName']==u'Island Bus V A'].values[0]
        Varms = sqrt(rolling_mean(sig_data[fname][u'Island Va^2'],1./60./tinc).values)
        Vbrms = sqrt(rolling_mean(sig_data[fname][u'Island Vb^2'],1./60./tinc).values)
        Vcrms = sqrt(rolling_mean(sig_data[fname][u'Island Vc^2'],1./60./tinc).values)
        sig_data[fname][u'Island Varms'] = pd.Series(Varms,index=sig_data[fname].index)
        sig_data[fname][u'Island Vbrms'] = pd.Series(Vbrms,index=sig_data[fname].index)
        sig_data[fname][u'Island Vcrms'] = pd.Series(Vcrms,index=sig_data[fname].index)

        # Island voltage sequence components based on rms values
        Vposx = Varms - Vbrms*cos(pi/3)*cos(2*pi/3) + Vbrms*sin(pi/3)*sin(2*pi/3) - Vcrms*cos(pi/3)*cos(4*pi/3) - Vcrms*sin(pi/3)*sin(4*pi/3)
        Vposy =       - Vbrms*cos(pi/3)*sin(2*pi/3) - Vbrms*sin(pi/3)*cos(2*pi/3) - Vcrms*cos(pi/3)*sin(4*pi/3) + Vcrms*sin(pi/3)*cos(4*pi/3)
        Vpos = sqrt(Vposx*Vposx+Vposy*Vposy)/3
        Vnegx = Varms - Vbrms*cos(pi/3)*cos(4*pi/3) + Vbrms*sin(pi/3)*sin(4*pi/3) - Vcrms*cos(pi/3)*cos(2*pi/3) - Vcrms*sin(pi/3)*sin(2*pi/3)
        Vnegy =       - Vbrms*cos(pi/3)*sin(4*pi/3) - Vbrms*sin(pi/3)*cos(4*pi/3) - Vcrms*cos(pi/3)*sin(2*pi/3) + Vcrms*sin(pi/3)*cos(2*pi/3)
        Vneg = sqrt(Vnegx*Vnegx+Vnegy*Vnegy)/3
        Vzerx = Varms - Vbrms*cos(pi/3) - Vcrms*cos(pi/3)
        Vzery =       - Vbrms*sin(pi/3) + Vcrms*sin(pi/3)
        Vzer  = sqrt(Vzerx*Vzerx+Vzery*Vzery)/3
        sig_data[fname][u'Island Vpos'] = pd.Series(Vpos,index=sig_data[fname].index)
        sig_data[fname][u'Island Vneg'] = pd.Series(Vneg,index=sig_data[fname].index)
        sig_data[fname][u'Island Vzer'] = pd.Series(Vzer,index=sig_data[fname].index)

        # Utility currents
        uIa=sig_data[fname][u'Utility I A'].values
        uIb=sig_data[fname][u'Utility I B'].values
        uIc=sig_data[fname][u'Utility I C'].values

        uIal = uIa - 0.5 * (uIb + uIc)
        uIbe = sqrt(3.)/2. * (uIb - uIc)
        sig_data[fname][u'uIal'] = pd.Series(uIal,index=sig_data[fname].index)
        sig_data[fname][u'uIbe'] = pd.Series(uIbe,index=sig_data[fname].index)

        # Utility Power calcuations kW
        uP = (uVa*uIa+uVb*uIb+uVc*uIc)/1000
        uQ = ((uVb-uVc)*uIa+(uVa-uVb)*uIc+(uVc-uVa)*uIb)/sqrt(3)/1000
        sig_data[fname][u'P Utility'] = pd.Series(uP,index=sig_data[fname].index)
        sig_data[fname][u'Q Utility'] = pd.Series(uQ,index=sig_data[fname].index)

        # RLC currents
        rIa=sig_data[fname][u'RLC Passive Load I A'].values
        rIb=sig_data[fname][u'RLC Passive Load I B'].values
        rIc=sig_data[fname][u'RLC Passive Load I C'].values

        # RLC power calcuations
        rP = (iVa*rIa+iVb*rIb+iVc*rIc)/1000
        rQ = ((iVb-iVc)*rIa+(iVa-iVb)*rIc+(iVc-iVa)*rIb)/sqrt(3)/1000
        sig_data[fname][u'P RLC'] = pd.Series(rP,index=sig_data[fname].index)
        sig_data[fname][u'Q RLC'] = pd.Series(rQ,index=sig_data[fname].index)

        # Amplifier currents
        ampIa=sig_data[fname][u'GE Load I A'].values
        ampIb=sig_data[fname][u'GE Load I B'].values
        ampIc=sig_data[fname][u'GE Load I C'].values

        # Amplifier power calculations
        ampP = (iVa*ampIa+iVb*ampIb+iVc*ampIc)/1000
        ampQ = ((iVb-iVc)*ampIa+(iVa-iVb)*ampIc+(iVc-iVa)*ampIb)/sqrt(3)/1000
        sig_data[fname][u'P AMP'] = pd.Series(ampP,index=sig_data[fname].index)
        sig_data[fname][u'Q AMP'] = pd.Series(ampQ,index=sig_data[fname].index)

        # B2 currents (sign-corrected for the reversed CT group, see B2LC1SIGN)
        b2Ia=B2LC1SIGN*sig_data[fname][u'B2 LC1 I A'].values
        b2Ib=B2LC1SIGN*sig_data[fname][u'B2 LC1 I B'].values
        b2Ic=B2LC1SIGN*sig_data[fname][u'B2 LC1 I C'].values

        # B2 Power calculations
        b2P = (iVa*b2Ia+iVb*b2Ib+iVc*b2Ic)/1000
        b2Q = ((iVb-iVc)*b2Ia+(iVa-iVb)*b2Ic+(iVc-iVa)*b2Ib)/sqrt(3)/1000
        sig_data[fname][u'P B2'] = pd.Series(b2P,index=sig_data[fname].index)
        sig_data[fname][u'Q B2'] = pd.Series(b2Q,index=sig_data[fname].index)

        # B1 currents (delta-connected line currents reconstructed from the
        # three LC measurements)
        b1LC1=B1LC1SIGN*sig_data[fname][u'B1 LC1 I'].values
        b1LC2=sig_data[fname][u'B1 LC2 I'].values
        b1LC3=sig_data[fname][u'B1 LC3 I'].values
        
        b1Ia = b1LC1 - b1LC2
        b1Ib = b1LC3 - b1LC1
        b1Ic = b1LC2 - b1LC3
        sig_data[fname][u'b1Ia'] = pd.Series(b1Ia,index=sig_data[fname].index)
        sig_data[fname][u'b1Ib'] = pd.Series(b1Ib,index=sig_data[fname].index)
        sig_data[fname][u'b1Ic'] = pd.Series(b1Ic,index=sig_data[fname].index)
        
        # B1 Power calculations
        b1P = (iVa*b1Ia+iVb*b1Ib+iVc*b1Ic)/1000
        b1Q = ((iVb-iVc)*b1Ia+(iVa-iVb)*b1Ic+(iVc-iVa)*b1Ib)/sqrt(3)/1000
        sig_data[fname][u'P B1'] = pd.Series(b1P,index=sig_data[fname].index)
        sig_data[fname][u'Q B1'] = pd.Series(b1Q,index=sig_data[fname].index)

        # Total PV calculations (banks 1 and 2)
        pvIa = b1Ia + b2Ia
        pvIb = b1Ib + b2Ib
        pvIc = b1Ic + b2Ic
        sig_data[fname][u'pvIa'] = pd.Series(pvIa,index=sig_data[fname].index)
        sig_data[fname][u'pvIb'] = pd.Series(pvIb,index=sig_data[fname].index)
        sig_data[fname][u'pvIc'] = pd.Series(pvIc,index=sig_data[fname].index)

        pvIal = pvIa - 0.5 * (pvIb + pvIc)
        pvIbe = sqrt(3.)/2. * (pvIb - pvIc)
        sig_data[fname][u'pvIal'] = pd.Series(pvIal,index=sig_data[fname].index)
        sig_data[fname][u'pvIbe'] = pd.Series(pvIbe,index=sig_data[fname].index)
                
        # Penetration calculations
        # (PV power as a fraction of total load power, defined only while the
        # island contactor is closed; NaN elsewhere)
        # penB1 = where(iVmag/sqrt(2)/BASE['Vln'] > VMAG['low'],b1P/rP,NaN)
        # penB2 = where(iVmag/sqrt(2)/BASE['Vln'] > VMAG['low'],b2P/rP,NaN)
        # penPV = where(iVmag/sqrt(2)/BASE['Vln'] > VMAG['low'],(b1P+b2P)/rP,NaN)
        penB1 = where(sig_data[fname][u'Island Contactor status'] > VMAG['icsLvl'],b1P/(rP+ampP),NaN)
        penB2 = where(sig_data[fname][u'Island Contactor status'] > VMAG['icsLvl'],b2P/(rP+ampP),NaN)
        penPV = where(sig_data[fname][u'Island Contactor status'] > VMAG['icsLvl'],(b1P+b2P)/(rP+ampP),NaN)
        sig_data[fname][u'B1 pen'] = pd.Series(penB1,index=sig_data[fname].index)
        sig_data[fname][u'B2 pen'] = pd.Series(penB2,index=sig_data[fname].index)
        sig_data[fname][u'B1+B2 pen'] = pd.Series(penPV,index=sig_data[fname].index)

        # Selecting a region of interest: island creation to cessation
        # ix1 = first sample with contactor open; ix2 = first sample with
        # collapsed island voltage; fall back to mid-record when not found.
        df1 = sig_data[fname]
        # ix1 = df1[abs(df1[u'Utility Vmag']-df1[u'Island Vmag'])/sqrt(2)/BASE['Vln'] > VMAG['delta']].index.values[0]
        if df1[abs(df1[u'Island Contactor status']) < VMAG['icsLvl']].empty:
            ix1 = df1.index.values[-1]/2
        else:
            ix1 = df1[abs(df1[u'Island Contactor status']) < VMAG['icsLvl']].index.values[0]
        if df1[abs(df1[u'Island Vmag'])/sqrt(2)/BASE['Vln'] < VMAG['low']].empty:
            ix2 = df1.index.values[-1]/2
        else:
            ix2 = df1[abs(df1[u'Island Vmag'])/sqrt(2)/BASE['Vln'] < VMAG['low']].index.values[0]

        # Widen the window by NCY cycles on each side, clamped to the record.
        tinc = sig_info[fname]['chTincr'][sig_info[fname]['chName']==u'Utility Bus V A'].values[0]
        left = int(NCY['pre']*1./60./tinc)
        right = int(NCY['post']*1./60./tinc)
        ix1 = max([ix1-left,0])
        ix2 = min([ix2+right,df1.index.values[-1]])
        df2 = df1[(df1.index > ix1) & (df1.index < ix2)]

        
        if CONFIG['WriteLimitedDataToExcel']: # Only the data from island formation to cessation
            df2.to_excel(writer,fname) # data is written here

        if True: # Place to try new things
            # Fig1: Utility voltage
            fig, (ax0, ax1)= plt.subplots(nrows=2, ncols=1, figsize=(8.5,11))
            fig.suptitle(fname) # This titles the figure
            # File info output to page top
            label= file_info[file_info.index==fname][['fiComment']].values[0][0]
            ax0.annotate(label,
                         xy=(0.5/8.5, 10.5/11), # (0.5,-0.25)inch from top left corner
                         xycoords='figure fraction',
                         horizontalalignment='left',
                         verticalalignment='top',
                         fontsize=10)
            # NOTE(review): bare subplots_adjust relies on a pylab-style global
            subplots_adjust(top=9./11.)
            
            # Alpha/Beta plots
            ax0.set_title('Island Voltage Al/Be')
            ax0.plot(df2['Island Val']/1.5/sqrt(2)/BASE['Vln'], df2['Island Vbe']/1.5/sqrt(2)/BASE['Vln'])
            ax0.set_xlim([-1.5,1.5])
            ax0.set_ylim([-1.2,1.2])
            ax0.grid(True, which='both')
            ax0.set_aspect('equal')

            ax1.set_title('Currents Al/Be')
            ax1.plot(df2['pvIal']/1.5, df2['pvIbe']/1.5)
            # ax1.set_ylim([-1.2,1.2])
            ax1.grid(True, which='both')
            ax1.set_aspect('equal')
            # ax1.set_title('Island Voltage Al/Be')
            # ax1.plot(df2['Time'], df2['Island Val']/1.5/sqrt(2)/BASE['Vln'])
            # ax1.plot(df2['Time'], df2['Island Vbe']/1.5/sqrt(2)/BASE['Vln'])
            # ax1.set_ylim([-1.2,1.2])
            # ax1.grid(True, which='both')

            pltPdf.savefig() # saves fig to pdf
            plt.close() # Closes fig to clean up memory

        if False: # Adding a chart with PLL variables
            # Fig1a: 
            fig, (ax0,ax1,ax2,ax3) = plt.subplots(nrows=4, ncols=1,
                                                      figsize=(8.5,11),
                                                      sharex=True)
            fig.suptitle(fname) # This titles the figure

            ax0.set_title('Utility Bus Vabc')
            ax0.plot(df2['Time'], df2[u'Utility Bus V A'])
            ax0.plot(df2['Time'], df2[u'Utility Bus V B'])
            ax0.plot(df2['Time'], df2[u'Utility Bus V C'])
            ax0.set_ylim([-500,500])
            ax0.grid(True, which='both')

            ax1.set_title('Island Bus Vabc')
            ax1.plot(df2['Time'], df2[u'Island Bus V A'])
            ax1.plot(df2['Time'], df2[u'Island Bus V B'])
            ax1.plot(df2['Time'], df2[u'Island Bus V C'])
            ax1.plot(df2['Time'], df2[u'Island Vmag'])
            # ax1.set_ylim([-500,500])
            ax1.grid(True, which='both')

            ax2.set_title('Island Bus Frequency')
            ax2.plot(df2['Time'], df2[u'Island Wpll']/(2*pi))
            # ax2.set_ylim([-100,100])
            ax2.grid(True, which='both')

            ax3.set_title('Island Bus Vx, Vy')
            ax3.plot(df2['Time'], df2[u'Island Vx'])
            ax3.plot(df2['Time'], df2[u'Island Vy'])
            # ax3.set_ylim([-100,100])
            ax3.grid(True, which='both')

            pltPdf.savefig() # Saves fig to pdf
            plt.close() # Closes fig to clean up memory


        if CONFIG['PlotFullRange']: # Plots a page with entire length of captured signals
            # Fig2: 
            fig, (ax0,ax1,ax2,ax3,ax4) = plt.subplots(nrows=5, ncols=1,
                                                      figsize=(8.5,11),
                                                      sharex=True)
            # plt.title(fname) # this has no effect

            # ax0.set_title('Utility Bus Vabc')
            # ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility Bus V A'])
            # ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility Bus V B'])
            # ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility Bus V C'])
            # ax0.set_ylim([-500,500])
            # ax0.grid(True, which='both')

            ax0.set_title('Island Bus Vabc')
            ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Island Bus V A'])
            ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Island Bus V B'])
            ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Island Bus V C'])
            ax0.plot(sig_data[fname]['Time'], sig_data[fname][u'Island Vmag'])
            ax0.set_ylim([-500,500])
            ax0.grid(True, which='both')

            ax1.set_title('Island Bus Frequency')
            ax1.plot(df2['Time'], df2[u'Island Wpll']/(2*pi))
            ax1.set_ylim([-120,120])
            ax1.grid(True, which='both')

            ax2.set_title('RLC Load Current Iabc')
            ax2.plot(sig_data[fname]['Time'], sig_data[fname][u'RLC Passive Load I A'])
            ax2.plot(sig_data[fname]['Time'], sig_data[fname][u'RLC Passive Load I B'])
            ax2.plot(sig_data[fname]['Time'], sig_data[fname][u'RLC Passive Load I C'])
            ax2.set_ylim([-100,100])
            ax2.grid(True, which='both')

            ax3.set_title('B1+B2 Iabc')
            ax3.plot(sig_data[fname]['Time'], sig_data[fname][u'pvIa'])
            ax3.plot(sig_data[fname]['Time'], sig_data[fname][u'pvIb'])
            # NOTE(review): 'pvIb' is plotted twice — the line below likely
            # should plot u'pvIc' instead.
            ax3.plot(sig_data[fname]['Time'], sig_data[fname][u'pvIb'])
            ax3.set_ylim([-100,100])
            ax3.grid(True, which='both')
 
            ax4.set_title('Utility Iabc')
            ax4.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility I A'])
            ax4.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility I B'])
            ax4.plot(sig_data[fname]['Time'], sig_data[fname][u'Utility I C'])
            ax4.set_ylim([-100,100])
            ax4.grid(True, which='both')
 
            pltPdf.savefig() # Saves fig to pdf
            plt.close() # Closes fig to clean up memory

        if CONFIG['ValidateCTs']: # Plots a page to validate CT reads and orientation
            # FigX: 
            fig, (ax0,ax1,ax2,ax3,ax4,ax5) = plt.subplots(nrows=6, ncols=1,
                                                      figsize=(8.5,11),
                                                      sharex=True)
            fig.suptitle(fname) # This titles the figure

            ax0.set_title('Phase A CTs: rlc_Ia = u_Ia + pv_Ia')
            ax0.plot(df2['Time'], df2[u'RLC Passive Load I A'])
            ax0.plot(df2['Time'], df2[u'Utility I A']+df2[u'pvIa'])
            # ax0.set_ylim([-50,50])
            ax0.grid(True, which='both')

            ax1.set_title('Phase B CTs: rlc_Ib = u_Ib + pv_Ib')
            ax1.plot(df2['Time'], df2[u'RLC Passive Load I B'])
            ax1.plot(df2['Time'], df2[u'Utility I B']+df2[u'pvIb'])
            # ax1.set_ylim([-50,50])
            ax1.grid(True, which='both')

            ax2.set_title('Phase C CTs: rlc_Ic = u_Ic + pv_Ic')
            ax2.plot(df2['Time'], df2[u'RLC Passive Load I C'])
            ax2.plot(df2['Time'], df2[u'Utility I C']+df2[u'pvIc'])
            # ax2.set_ylim([-50,50])
            ax2.grid(True, which='both')

            ax3.set_title('Phase A CTs: u_Ia = rlc_Ia - pv_Ia, b2Ia')
            ax3.plot(df2['Time'], df2[u'Utility I A'])
            ax3.plot(df2['Time'], df2[u'RLC Passive Load I A']-df2[u'pvIa'])
            ax3.plot(df2['Time'], df2[u'B2 LC1 I A'])
            # ax3.set_ylim([-25,25])
            ax3.grid(True, which='both')

            ax4.set_title('Phase B CTs: u_Ib = rlc_Ib - pv_Ib, b2Ib')
            ax4.plot(df2['Time'], df2[u'Utility I B'])
            ax4.plot(df2['Time'], df2[u'RLC Passive Load I B']-df2[u'pvIb'])
            ax4.plot(df2['Time'], df2[u'B2 LC1 I B'])
            # ax4.set_ylim([-25,25])
            ax4.grid(True, which='both')

            ax5.set_title('Phase C CTs: u_Ic = rlc_Ic - pv_Ic, b2Ic')
            ax5.plot(df2['Time'], df2[u'Utility I C'])
            ax5.plot(df2['Time'], df2[u'RLC Passive Load I C']-df2[u'pvIc'])
            ax5.plot(df2['Time'], df2[u'B2 LC1 I C'])
            # ax5.set_ylim([-25,25])
            ax5.grid(True, which='both')

            pltPdf.savefig() # Saves fig to pdf
            plt.close() # Closes fig to clean up memory

            fig, (ax0,ax1,ax2,ax3,ax4,ax5) = plt.subplots(nrows=6, ncols=1,
                                                      figsize=(8.5,11),
                                                      sharex=True)
            fig.suptitle(fname) # This titles the figure

            ax0.set_title('Phase A CTs: rlc_Ia = u_Ia + pv_Ia')
            ax0.plot(df2['Time'], df2[u'RLC Passive Load I A'])
            ax0.plot(df2['Time'], df2[u'Utility I A']+df2[u'pvIa'])
            # ax0.set_ylim([-50,50])
            ax0.grid(True, which='both')

            ax1.set_title('Phase B CTs: rlc_Ib = u_Ib + pv_Ib')
            ax1.plot(df2['Time'], df2[u'RLC Passive Load I B'])
            ax1.plot(df2['Time'], df2[u'Utility I B']+df2[u'pvIb'])
            # ax1.set_ylim([-50,50])
            ax1.grid(True, which='both')

            ax2.set_title('Phase C CTs: rlc_Ic = u_Ic + pv_Ic')
            ax2.plot(df2['Time'], df2[u'RLC Passive Load I C'])
            ax2.plot(df2['Time'], df2[u'Utility I C']+df2[u'pvIc'])
            # ax2.set_ylim([-50,50])
            ax2.grid(True, which='both')

            ax3.set_title('Phase A CTs: pv_Ia = rlc_Ia - u_Ia, b1Ia, b2Ia')
            ax3.plot(df2['Time'], df2[u'RLC Passive Load I A']-df2[u'Utility I A'])
            ax3.plot(df2['Time'], df2[u'pvIa'])
            ax3.plot(df2['Time'], df2[u'b1Ia'])
            ax3.plot(df2['Time'], df2[u'B2 LC1 I A'])
            # ax3.set_ylim([-50,50])
            ax3.grid(True, which='both')

            ax4.set_title('Phase B CTs: pv_Ib = rlc_Ib - u_Ib, b1Ib, b2Ib')
            ax4.plot(df2['Time'], df2[u'RLC Passive Load I B']-df2[u'Utility I B'])
            ax4.plot(df2['Time'], df2[u'pvIb'])
            ax4.plot(df2['Time'], df2[u'b1Ib'])
            ax4.plot(df2['Time'], df2[u'B2 LC1 I B'])
            # ax4.set_ylim([-50,50])
            ax4.grid(True, which='both')

            ax5.set_title('Phase C CTs: pv_Ic = rlc_Ic - u_Ic, b1Ic, b2Ic')
            ax5.plot(df2['Time'], df2[u'RLC Passive Load I C']-df2[u'Utility I C'])
            ax5.plot(df2['Time'], df2[u'pvIc'])
            ax5.plot(df2['Time'], df2[u'b1Ic'])
            ax5.plot(df2['Time'], df2[u'B2 LC1 I C'])
            # ax5.set_ylim([-50,50])
            ax5.grid(True, which='both')

            pltPdf.savefig() # Saves fig to pdf
            plt.close() # Closes fig to clean up memory

        # Fig3: 
        fig, (ax0,ax1,ax2,ax3,ax4) = plt.subplots(nrows=5, ncols=1,
                                                  figsize=(8.5,11),
                                                  sharex=True)
        fig.suptitle(fname) # This titles the figure

        # ax0.set_title('Utility Bus Vabc')
        # ax0.plot(df2['Time'], df2[u'Utility Bus V A'])
        # ax0.plot(df2['Time'], df2[u'Utility Bus V B'])
        # ax0.plot(df2['Time'], df2[u'Utility Bus V C'])
        # ax0.set_ylim([-500,500])
        # ax0.grid(True, which='both')

        ax0.set_title('Island Bus Vabc')
        ax0.plot(df2['Time'], df2[u'Island Bus V A'])
        ax0.plot(df2['Time'], df2[u'Island Bus V B'])
        ax0.plot(df2['Time'], df2[u'Island Bus V C'])
        ax0.plot(df2['Time'], df2[u'Island Vmag'])
        # ax0.set_ylim([-500,500])
        ax0.grid(True, which='both')

        ax1.set_title('Island Bus Frequency')
        ax1.plot(df2['Time'], df2[u'Island Wpll']/(2*pi))
        ax1.set_ylim([-60, 180])
        ax1.grid(True, which='both')

        ax2.set_title('Total Load Current Iabc')
        ax2.plot(df2['Time'], df2[u'RLC Passive Load I A']+df2[u'GE Load I A'])
        ax2.plot(df2['Time'], df2[u'RLC Passive Load I B']+df2[u'GE Load I B'])
        ax2.plot(df2['Time'], df2[u'RLC Passive Load I C']+df2[u'GE Load I C'])
        # ax2.set_ylim([-100,100])
        ax2.grid(True, which='both')

        ax3.set_title('B1+B2 Iabc')
        ax3.plot(df2['Time'], df2[u'pvIa'])
        ax3.plot(df2['Time'], df2[u'pvIb'])
        ax3.plot(df2['Time'], df2[u'pvIc'])
        # ax3.set_ylim([-100,100])
        ax3.grid(True, which='both')

        ax4.set_title('Utility Iabc')
        ax4.plot(df2['Time'], df2[u'Utility I A'])
        ax4.plot(df2['Time'], df2[u'Utility I B'])
        ax4.plot(df2['Time'], df2[u'Utility I C'])
        # ax4.set_ylim([-100,100])
        ax4.grid(True, which='both')

        pltPdf.savefig() # Saves fig to pdf
        plt.close() # Closes fig to clean up memory

        # Fig4: 
        fig, (ax0,ax1,ax2,ax3,ax4) = plt.subplots(nrows=5, ncols=1,
                                                  figsize=(8.5,11),
                                                  sharex=True)
        fig.suptitle(fname) # This titles the figure

        ax0.set_title('P[kW]: Utility, Load, PV')
        ax0.plot(df2['Time'], df2[u'P Utility'])
        ax0.plot(df2['Time'], df2[u'P RLC']+df2[u'P AMP'])
        ax0.plot(df2['Time'], df2[u'P B1']+df2[u'P B2'])
        # ax0.set_ylim([-50,250])
        ax0.grid(True, which='both')

        ax1.set_title('Q[kVAr]: Utility, Load, PV')
        ax1.plot(df2['Time'], df2[u'Q Utility'])
        ax1.plot(df2['Time'], df2[u'Q RLC']+df2[u'Q AMP'])
        ax1.plot(df2['Time'], df2[u'Q B1']+df2[u'Q B2'])
        # ax1.set_ylim([-80,80])
        ax1.grid(True, which='both')

        ax2.set_title('Island Vpos, pu penetration')
        ax2.plot(df2['Time'], df2[u'Island Vpos']/BASE['Vln'])
        ax2.plot(df2['Time'], df2[u'B1+B2 pen'])
        ax2.set_ylim([0,1.5])
        ax2.grid(True, which='both')

        ax3.set_title('Island Vneg, Vzero')
        ax3.plot(df2['Time'], df2[u'Island Vneg']/BASE['Vln'])
        ax3.plot(df2['Time'], df2[u'Island Vzer']/BASE['Vln'])
        # ax3.set_ylim([0,0.25])
        ax3.grid(True, which='both')

        ax4.set_title('Island Vrms abc')
        ax4.plot(df2['Time'], df2[u'Island Varms']/BASE['Vln'])
        ax4.plot(df2['Time'], df2[u'Island Vbrms']/BASE['Vln'])
        ax4.plot(df2['Time'], df2[u'Island Vcrms']/BASE['Vln'])
        # ax4.set_ylim([0,1.25])
        ax4.grid(True, which='both')

        pltPdf.savefig() # Saves fig to pdf
        plt.close() # Closes fig to clean up memory

    print "Closing: Results.pdf"
    pltPdf.close() # Close the pdf file

    if CONFIG['WriteLimitedDataToExcel']: # Close excel file
        print "Writing: LimitedData.xlsx"
        writer.save() # file is saved here
    
    return
Пример #51
0
    Mx = np.column_stack((data_X[:, np.newaxis], data_Y[:, np.newaxis]))
    M2 = Red.rdp(Mx, x1)
    data_X2 = M2[:, 0].transpose()
    data_Y2 = M2[:, 1].transpose()
    return data_X2, data_Y2


def PandasResample(df, length):
    """Resample *df* onto a uniform time grid of *length* points.

    Parameters
    ----------
    df : pandas.DataFrame with a DatetimeIndex.
    length : int
        Number of evenly spaced samples wanted (must be >= 2).

    Returns
    -------
    DataFrame resampled to the computed period, mean-aggregated, with the
    NaNs produced by upsampling filled by linear interpolation.
    """
    # Period that divides the full time span into (length - 1) equal steps.
    td = (df.index[-1] - df.index[0]) / (length - 1)
    # .resample(td, how='mean') used the deprecated `how` kwarg, which was
    # removed from pandas; the method-chain form is the equivalent API.
    return df.resample(td).mean().interpolate()  # Handle NaNs when upsampling


# --- Load a TDMS capture into a DataFrame and tidy its columns ---
tdms_file = TdmsFile('data/10676_3531053.tdms')
data = tdms_file.as_dataframe()

# Channel kept around for its waveform timing properties.
# ("Unbenannt" is German for "Untitled", the default LabVIEW group name.)
channel = tdms_file.object("Unbenannt", "ProbenMitte")
# print channel.property('wf_increment')
# print channel.property('wf_start_time')
# print channel.time_track()

# Drop all completely empty columns.
data = data.dropna(axis=1, how='all')

# Resolve duplicated column names: keep only the channel part of the
# "/group/'channel'" path and strip the surrounding quotes.
name_col = []
for jh in data.columns:
    tdummy = jh.split('/')[-1]
    name_col.append(tdummy.replace("\'", ""))
data.columns = name_col

# Build a DatetimeIndex starting at the waveform start time, one row per 30 s.
# NOTE(review): assumes the capture was sampled every 30 s — confirm against
# channel.property('wf_increment').
date_index = pd.date_range(channel.property('wf_start_time'), periods=data.index[-1] + 1, freq='30s')
# print number_of_groups   this gives a "1", so there is only one group
# NOTE(review): list_of_groups and datafile are defined earlier in the
# original script; this fragment relies on them already being in scope.
for groupname in list_of_groups:
    print groupname  # the group name is "data"; prints every element of the list
    list_of_channels = datafile.group_channels(groupname) # npTDMS call returning the list of channel objects in this group
    for channel in list_of_channels:
        print channel

# extracting first waveform
# getting voltages
bin_res_x = []
bin_res_y = []
#for group in ("Cube X1",):
for group in ("Cube X1", "Cube X2", "Cube Y1", "Cube Y2", "Cube Z1", "Cube Z2"):
#for group in ("Cube X2", "Cube Y1", "Cube Y2", "Cube Z1", "Cube Z2"):
    print "Group: " + group
    cubeX1 = datafile.object('data',group)      # one group ("data") holds a channel per cube face
    # getting time increment and then creating time array
    print cubeX1.properties
    dt = cubeX1.property('wf_increment') # sample period from the waveform properties of this channel
    print "sample_period = " + str(dt)
    cubeX1_y = cubeX1.data     # y-axis for the plot: the voltage samples
    # print cubeX1_y
    # Build the time axis: sample index multiplied by the waveform increment.
    cubeX1_x = [0 for x in range(len(cubeX1_y))]
    print len(cubeX1_y)
    for count in range(0,len(cubeX1_y),1):
        cubeX1_x[count] = count*dt

    #channel = datafile.object('data','Cube X1')
    #data = channel.data

    # Real-input FFT of the voltage trace for this cube face.
    sp = rfft(cubeX1_y)
Пример #53
0
def chunker(seq, size):
    """Yield successive *size*-sized slices of *seq* lazily.

    The final chunk may be shorter when len(seq) is not a multiple of size.
    """
    # range() behaves correctly on both Python 2 and 3 here; the original
    # xrange is Python-2-only and raises NameError under Python 3.
    return (seq[pos:pos + size] for pos in range(0, len(seq), size))


def Reduce_data(data_X, data_Y, x1):
    """Thin out an (x, y) curve with the Ramer-Douglas-Peucker algorithm.

    data_X, data_Y are 1-D coordinate arrays; x1 is the RDP tolerance
    (epsilon). Returns the reduced x and y arrays.
    """
    # Pair the coordinates into an (n, 2) point matrix for the RDP routine.
    points = np.column_stack((data_X[:, np.newaxis], data_Y[:, np.newaxis]))
    reduced = Red.rdp(points, x1)
    # Split the surviving points back into separate coordinate arrays.
    return reduced[:, 0].transpose(), reduced[:, 1].transpose()


def PandasResample(df, length):
    """Resample *df* onto a uniform time grid of *length* points.

    Parameters
    ----------
    df : pandas.DataFrame with a DatetimeIndex.
    length : int
        Number of evenly spaced samples wanted (must be >= 2).

    Returns
    -------
    DataFrame resampled to the computed period, mean-aggregated, with the
    NaNs produced by upsampling filled by linear interpolation.
    """
    # Period that divides the full time span into (length - 1) equal steps.
    td = (df.index[-1] - df.index[0]) / (length - 1)
    # .resample(td, how='mean') used the deprecated `how` kwarg, which was
    # removed from pandas; the method-chain form is the equivalent API.
    return df.resample(td).mean().interpolate()  # Handle NaNs when upsampling


# --- Load an oven-test TDMS file and plot setpoint vs. time ---
tdms_file = TdmsFile('data/_1620291.tdms')
data = tdms_file.as_dataframe()
print data.columns

# Channels of interest (group "Unbenannt" = German default "Untitled"):
tizm = tdms_file.object("Unbenannt", "Zeit")        # time axis ("Zeit" = time)
channel = tdms_file.object("Unbenannt", "OfenIO")   # oven on/off flag ("Ofen" = oven)
channel1 = tdms_file.object("Unbenannt", "T_X6")    # measured temperature channel
channel2 = tdms_file.object("Unbenannt", "Soll")    # setpoint ("Soll" = target)
# plt.plot(tizm.data,channel.data)
# ax.scatter(x, y, c=c, cmap=cmap)
# Scatter of the setpoint over time, coloured by the (scaled) oven state.
plt.scatter(tizm.data, channel2.data, c=channel.data * 3., cmap=plt.cm.hot)
plt.show()
Пример #54
0
# A step larger than 1000 in the differentiated frame signal marks a frame
# onset; `counter` holds the matching sample indices.
# NOTE(review): dframes/counter/data/dataStart/tdms come from earlier in the
# original script.
i = dframes>1000
frameIndex = counter[i]
dataFrameStart = frameIndex[0]
print len(frameIndex), 'frames detected'

#######################
# Cut times of interest
#######################
# Window (in video frames) around the stimulus onset.
onset = 26900 #30000 #26900
pre = 200
post = 800
start = onset-pre
end = onset+post

# Cut tdms: position trace and stimulus spot diameter, frame-aligned.
pos = tdms.object('Real-time Coordinates', 'X-Vertical').data[start:end]
spot = tdms.object('Visual  Stimulation', 'Spot Diameter').data[start:end]

# Cut probe data
#print frameIndex[onset]
# Convert frame numbers to raw-sample indices; dataStart corrects for the
# start-up artefact offset established earlier.
pstart = frameIndex[start]+dataStart
pend = frameIndex[end]+dataStart
trace = data[pstart:pend]
#print len(data)
#print pstart, pend, len(trace)

###############
# Make neo file
###############
#bl = neo.Block(name='Ch2')
#seg = neo.Segment(name='Trial_1')
Пример #55
0
    def __init__(self, tdms_filename):
        """Load a .tdms measurement file and initialise all analysis state.

        Reads the data columns declared in definitions.py, computes file
        hashes used as the measurement identifier, initialises the boolean
        filter arrays, and locates the accompanying video and contour files
        in the same directory.
        """
        # Kernel density estimator caches, keyed per plot configuration.
        self._KDE_Scatter = {}
        self._KDE_Contour = {}
        self._old_filters = {} # for comparison to new filters
        self._Downsampled_Scatter = {}
        self._polygon_filter_ids = []
        
        self.tdms_filename = tdms_filename
        self.name = os.path.split(tdms_filename)[1].split(".tdms")[0]
        self.fdir = os.path.dirname(tdms_filename)

        # Measurement prefix shared by the companion .ini files.
        mx = os.path.join(self.fdir, self.name.split("_")[0])
        
        self.title = u"{} - {}".format(
                      GetProjectNameFromPath(tdms_filename),
                      os.path.split(mx)[1])
        
        # Hash the tdms file plus its camera/parameter ini files; the tdms
        # hash doubles as the unique identifier of this measurement.
        f2hash = [ tdms_filename, mx+"_camera.ini", mx+"_para.ini" ]
        
        self.file_hashes = [(fname, _hashfile(fname)) for fname in f2hash]

        self.identifier = self.file_hashes[0][1]

        tdms_file = TdmsFile(tdms_filename)
        
        ## Set all necessary internal parameters as defined in
        ## definitions.py
        ## Note that this is meta-programming. If you want to add a
        ## different column from tdms files, then edit definitions.py:
        ## -> uid, axl, rdv, tfd
        # time is always there
        datalen = len(tdms_file.object("Cell Track", "time").data)
        for i, group in enumerate(dfn.tfd):
            # group = (table name, column name(s), post-processing function)
            table = group[0]
            if not isinstance(group[1], list):
                group[1] = [group[1]]
            func = group[2]
            args = []
            try:
                for arg in group[1]:
                    data = tdms_file.object(table, arg).data
                    args.append(data)
            except KeyError:
                # Column missing from this tdms file: substitute zeros so the
                # attribute still exists with the expected length.
                func = lambda x: x
                args = [np.zeros(datalen)]
            finally:
                setattr(self, dfn.rdv[i], func(*args))

        # Plotting filters, set by "GetDownSampledScatter".
        # This is a nested filter which is applied after self._filter
        self._plot_filter = np.ones_like(self.time, dtype=bool)

        # Set array filters:
        # This is the filter that will be used for plotting:
        self._filter = np.ones_like(self.time, dtype=bool)
        # The filtering array for a general data event limit:
        self._filter_limit = np.ones_like(self._filter)
        attrlist = dir(self)
        # Find attributes to be filtered
        # These are the filters from which self._filter is computed
        # NOTE(review): `data` here is whatever column the loading loop above
        # read last — confirm its length matches all filterable attributes.
        inifilter = np.ones(data.shape, dtype=bool)
        for attr in attrlist:
            # only allow filterable attributes from global dfn.cfgmap
            # (dict.has_key is Python-2-only syntax)
            if not dfn.cfgmap.has_key(attr):
                continue
            data = getattr(self, attr)
            if isinstance(data, np.ndarray):
                # great, we are dealing with an array
                setattr(self, "_filter_"+attr, inifilter.copy())
        self._filter_polygon = inifilter.copy()

        self.SetConfiguration()

        # Get video file name: any .avi sharing the measurement name prefix.
        videos = []
        for f in os.listdir(self.fdir):
            if f.endswith(".avi") and f.startswith(self.name[:2]):
                videos.append(f)
        videos.sort()
        if len(videos) == 0:
            self.video = None
        else:
            # Defaults to first avi file
            self.video = videos[0]
            # g/q video file names. q comes first.
            for v in videos:
                if v.endswith("imag.avi"):
                    self.video = v
                    break
                # add this here, because fRT-DC measurements also contain
                # videos ..._proc.avi
                elif v.endswith("imaq.avi"):
                    self.video = v
                    break
        
        # Get contours: parse "_contours.txt" files into {frame: points}.
        self.contours = {}
        for f in os.listdir(self.fdir):
            if f.endswith("_contours.txt") and f.startswith(self.name[:2]):
                with open(os.path.join(self.fdir, f), "r") as c:
                    # read entire file
                    cdat = c.read(-1)
                for cont in cdat.split("Contour in frame"):
                    cont = cont.strip()
                    if len(cont) == 0:
                        continue
                    cont = cont.splitlines()
                    # the frame is the first number
                    frame = int(cont.pop(0))
                    # NOTE(review): uint8 limits contour coordinates to 0-255;
                    # larger pixel coordinates would wrap — confirm intended.
                    cont = [ np.fromstring(c.strip("()"), sep=",") for c in cont ]
                    cont = np.array(cont, dtype=np.uint8)
                    self.contours[frame] = cont
    dd=ncfile.createVariable(name,np.dtype('int32').char,('x','y'))
    data_out2=np.int32(((data_out+abs(data_out.min()))/(data_out.max()-data_out.min()))*(np.power(2,31)-1))
    dd[:] = data_out2
    ncfile.close()

data=[]
phase=[]
messpunkte=200 # number of measurement points per resonance curve
pixel=200	# number of resonance curves

############## Read the amplitude TDMS files ''''''''''''''''''''''''''''''''''''''
fnames=glob("/home/sebadur/Dokumente/Studium/Master/Fit-GUI/Messung 06.11.14/amp*.tdms")    # every matching file in this folder with the .tdms extension
sorted_fnames = sorted(fnames, key=lambda x: int(x.split('/')[-1].split('amp')[1].split('.')[0]))    # file names split and sorted numerically
for i in range(pixel):
    tdms_file = TdmsFile(sorted_fnames[i])
    channel = tdms_file.object('Unbenannt', 'Untitled')     # first argument is the group name, second the channel name
    data.append(np.array(channel.data))

###################--phase-- ############################
fnames=glob("/home/sebadur/Dokumente/Studium/Master/Fit-GUI/Messung 06.11.14/phase*.tdms")    # every matching file in this folder with the .tdms extension
sorted_fnames = sorted(fnames, key=lambda x: int(x.split('/')[-1].split('phase')[1].split('.')[0]))    # file names split and sorted numerically
for i in np.arange(pixel):
    tdms_file = TdmsFile(sorted_fnames[i])
    channel = tdms_file.object('Unbenannt', 'Untitled')     # first argument is the group name, second the channel name
    #data[i]=np.array(channel.data)
    phase.append(np.array(channel.data))



# Sanity check: samples per curve must match the configured pixel count.
# NOTE(review): under Python 2 this is integer (floor) division — confirm.
if pixel!=len(data[1])/messpunkte:
  print('Check Pixel Setting')
Пример #57
0
    def lade_tdms(self, datei):
        """Load one tdms file and return its averaged, trimmed spectrum.

        :type datei: str
        :return: The averaged measurement values from the given file
            (zeros if the file cannot be read).
        :rtype: numpy.multiarray.ndarray
        """
        # Trimmed output buffer: bereich_links cuts from the left (positive),
        # bereich_rechts from the right (negative).
        daten = np.zeros(self.par.messpunkte - self.par.bereich_links + self.par.bereich_rechts)
        try:
            tdat = TdmsFile(datei)
            tdms = tdat.object(tdat.groups()[0], 'Untitled')
        except (ValueError, IOError):
            # "File ... not readable" — return the zero buffer instead.
            print('Datei ' + datei + ' nicht auslesbar')
            return daten
        index_fehler = False
        for mittelung in range(self.par.mittelungen):
            try:
                """
                Mittelung (durch Addition)
                UND
                Begrenzung des Fitbereichs (zur Eliminierung von parasitären Frequenzpeaks) nach Angabe in GUI
                """
                # Average by summation, while restricting the fit range (to
                # suppress parasitic frequency peaks) as configured in the GUI.
                start = mittelung * self.par.messpunkte
                links = start + self.par.bereich_links
                rechts = start + self.par.messpunkte + self.par.bereich_rechts
                daten += tdms.data[links:rechts]

                """if mittelung == 0:
                    name = raw_input('$')
                    import matplotlib.pyplot as plt
                    plt.title(name+ ": Einzelmessung")
                    plt.xlabel(u"Frequenz / Hz")
                    plt.ylabel(u"Amplitude / µV")
                    plt.plot(
                        self.frequenzen,
                        daten * (1000*1000/50/2.9),
                        antialiased=True
                    )
                    plt.show()
                elif mittelung == self.par.mittelungen-1:
                    name = raw_input('$')
                    import matplotlib.pyplot as plt
                    plt.title(name+ ": 200x gemittelt")
                    plt.xlabel(u"Frequenz / Hz")
                    plt.ylabel(u"Amplitude / µV")
                    plt.plot(
                        self.frequenzen,
                        daten / self.par.mittelungen * (1000*1000/180),
                        antialiased=True
                    )
                    plt.show()"""

            except (ValueError, IndexError):
                """
                In diesem Fall ist ein Messfehler aufgetreten. Das kann (sehr selten) passieren, weshalb der Fit
                dennoch funktionieren muss. Hier ist dann aber ein Einbruch in der Amplitude zu verzeichnen.
                """
                # A (rare) acquisition error: samples missing for this run.
                # The fit must still work, but the amplitude dips here.
                if not index_fehler:
                    index_fehler = True
                    print('Fehlende Messwerte in Datei ' + datei)
        return daten / self.par.mittelungen
	# Create groups data by grouping channels for 'Chart' and Sort Channel Groups so 7' is first plotted.
	channel_groups = channel_list.groupby('Chart', sort=False)

	#Read in Chart File
	charts_data = pd.read_csv(channel_location + 'Charts.csv')

	charts_data = charts_data.set_index('Chart')

	output_location = '../3_charts/Experiment_' + str(experiment) + '/'

	# If the folder doesn't exist create it.
	if not os.path.exists(output_location):
		os.makedirs(output_location)

	#Get Time from TDMS File
	Time = tdms_file.object('Channels', 'Time').data
	Time = [datetime.strptime(t, '%Y-%m-%d %H:%M:%S') for t in Time]
	Start_Time = Time[0]
	End_Time = Time[-1]

	# #Pull Marked Events From TDMS
	# events = tdms_file.object('Events','Event').data
	# event_times = tdms_file.object('Events', 'Time').data
	# event_times = [datetime.strptime(t, '%Y-%m-%d-%H:%M:%S') for t in event_times]

	# event_data = pd.DataFrame({'Events':events,'Times':event_times})

	#Pull Events from csv File
	event_data = pd.read_csv(data_location + 'Experiment_' + str(experiment) + '/Experiment_' + str(experiment) + '_Events.csv')

	for chart in channel_groups.groups:
Пример #59
0
class Probe():
    """
    Class for loading, displaying and analysing probe data,
    and matching video data from .tdms files    
    
    Point to a folder with the .nex files and a single .tdms file
    """
    
    def __init__(self, folder):
        """Remember the data folder; the tdms handle stays unset until load_tdms()."""
        self.tdms = None
        self.folder = folder
    
    def load_nex(self, channels=np.arange(1,16)):
        """Read the .nex recording for each requested channel into neo segments.

        channels : iterable of 1-based channel numbers (default 1..15).
        Populates self.nexReaders and self.nexSegs; channel numbers are
        remapped through chnOrder so segments follow the probe geometry.
        """
        basename = 'SE-CSC-RAW-Ch'
        chnOrder = [12, 2, 14, 4, 6, 3, 8, 1, 10, 7, 5, 11, 9, 15, 13] # to match probe geometry
        self.nexReaders, self.nexSegs = [], []
        for chn in channels:
            # File name uses the remapped hardware channel number.
            fname = basename + str(chnOrder[chn-1]) + '_.nex'
            print 'Reading', fname, '....'
            try:
                r = neo.io.NeuroExplorerIO(filename=(self.folder + fname))
                self.nexReaders.append(r) 
                self.nexSegs.append(r.read_segment(lazy=False, cascade=True))
            except IOError:
                # Missing channel files are skipped with a warning.
                print 'File', fname, 'not found'
    
    def load_tdms(self):
        """ Looks for .tdms files in folder and loads the first one
        it finds
        """
        for fname in os.listdir(self.folder):
            ext = os.path.splitext(fname)[1]            
            if ext=='.tdms': self.tdms = TdmsFile(self.folder+fname)
        if not self.tdms: print 'Tdms file not found' 

    def load_frameSignal(self):
        """ Loads analog input with a TTL high on the onset of each
        video frame
        """
        fname = 'Analog Input Ch16_.nex'
        try:
            self.frameReader = neo.io.NeuroExplorerIO(filename=(self.folder + fname)) 
            self.framesSeg = self.frameReader.read_segment(lazy=False, cascade=True)
        except IOError:
            print 'File', fname, 'not found'        
       
    def get_frameIndex(self):
        """ Get datapoint indices of analog signals that correspond
        to frame onsets 
        
        Cutting the artefact at the beginning of the frames trace for
        TTL detection causes an offset between this and the other analog
        signals, stored for correction in self.framesOffset
        """
        frames = np.array(self.framesSeg.analogsignals[0])
        # Get rid of probe starting artefact to allow TTL detection:
        # the last sample below -1000 marks the end of the artefact; skip
        # a further 100 samples for safety.
        icounter = np.arange(0,len(frames))
        self.framesOffset = icounter[frames<(-1000)][-1]+100
        frames = frames[self.framesOffset:]
        # Get the start of each frame: a rising step larger than 1000 in
        # the differentiated trace is a frame-onset TTL edge.
        dframes = np.diff(frames)
        icounter = np.arange(0,len(dframes))
        self.framesIndex = icounter[dframes>1000]
        # Release the large arrays; only the indices are kept.
        self.framesSeg, frames = None, None

    def get_dataWindow(self, win=(0,0), tdms=False):
        """ Load data segments and cut to desired
        time window. 
        
        Option to get the matching window for tdms
        data (position and spot profile)
        
        win is the window (tuple), default is to get everything 
        """
        # Get analog signals
        print 'Getting data windows....'
        self.dataWin = []
        # Translate frame numbers to raw-sample indices, re-applying the
        # artefact offset computed in get_frameIndex. (0,0) is a sentinel
        # meaning "the whole recording".
        if win==(0,0):
            istart = self.framesIndex[0]  + self.framesOffset
            iend = self.framesIndex[len(self.framesIndex)-1] + self.framesOffset
        else:
            istart = self.framesIndex[win[0]] + self.framesOffset
            iend = self.framesIndex[win[1]] + self.framesOffset           
        for seg in self.nexSegs:
            data = np.array(seg.analogsignals[0])         
            self.dataWin.append(data[istart:iend])
        # Drop references to the large arrays.
        seg, data = None, None
        # Get tdms: position and spot-diameter traces are frame-aligned, so
        # they are sliced directly with the frame-number window.
        if tdms:
            try:
                self.pos = self.tdms.object('Real-time Coordinates', 'X-Vertical').data[win[0]:win[1]]    
                self.spot = self.tdms.object('Visual  Stimulation', 'Spot Diameter').data[win[0]:win[1]]
            except AttributeError:
                # self.tdms is still None — load_tdms was not called/failed.
                print 'No TDMS data loaded'

    def get_probeData(self):
        """Collect the full analog trace of every loaded neo segment into self.data."""
        self.data = [np.array(segment.analogsignals[0]) for segment in self.nexSegs]


    def get_timeFreq(self, data, f_start, f_stop, deltafreq):
        """ Calculate spectogram for data and store tfr object
        Data can be a list of signals
        
        Note that sampling rates are hardcoded at the moment
        """
        print 'Calculating spectograms....'
        self.tfrData = []
        for signal in data:
            anasig = neo.AnalogSignal(signal, units='V', t_start=0*pq.s, sampling_rate=30000*pq.Hz)
            self.tfrData.append(TimeFreq(anasig, f_start=f_start, f_stop=f_stop, deltafreq=deltafreq,
                                f0=2.5,  sampling_rate=f_stop*2.))