Example #1
 def IgorLoad(self,Source):
     from igor import binarywave,igorpy
     Waves={}
     Variables={}
     
     if Source[-3:] == 'ibw':
        b=binarywave.load(Source)
        Waves[b['wave']['wave_header']['bname']]=b['wave']['wData']
         Variables['SampleInterval']=1
     elif Source[-3:] == 'pxp':
         b=igorpy.load(Source)
         for i in b:
             if isinstance(i, igorpy.Wave):
                 Waves[str(i.name)]=i.data
             elif isinstance(i, igorpy.Variables):
                 Variables=i.uservar    
     elif Source[-3:] == 'txt':
         b=numpy.loadtxt(Source)
         Waves[Source.split("/")[-1].replace('.txt','').replace('.','_')]=b
         Variables=None 
    elif Source[-3:] == 'csv':
        b=numpy.loadtxt(Source, delimiter=',')
        Waves[Source.split("/")[-1].replace('.csv','').replace('.','_')]=b
        Variables=None 
     elif Source[-3:] == 'wcp':
        print("not supported yet, but you can import an igor file")
         b,c=self.read_block(Source)
         for i,j in enumerate(b):
             Waves[str(j[0])+str(i)]=numpy.array(j[1])
         for i in c:
             Variables[i]=c[i]  
    if Waves:
        return Waves,Variables
    else:
        msgBox = QtGui.QMessageBox()
        msgBox.setText(
        """
        <b>Filetype not supported</b>
        <p>Only txt, csv, ibw and pxp files are supported
        """)
        msgBox.exec_()
        return None, None
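Most of the snippets in this collection index into the same dictionary that igor.binarywave.load returns. A minimal sketch of that layout, inferred from the examples below (the file name 'example.ibw' is a placeholder):

from igor import binarywave

data = binarywave.load('example.ibw')
print(data['version'])                       # file format version (2, 3 or 5)
print(data['wave']['wData'].shape)           # numpy array holding the samples
print(data['wave']['wave_header']['bname'])  # wave name, stored as bytes
print(data['wave']['note'])                  # free-form metadata bytestring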
Example #2
    def read_analogsignal(self, path=None, lazy=False):
        assert not lazy, 'This IO does not support lazy mode'

        if not HAVE_IGOR:
            raise Exception("`igor` package not installed. "
                            "Try `pip install igor`")
        if self.extension == 'ibw':
            data = bw.load(str(self.filename))
            version = data['version']
            if version > 5:
                raise IOError("Igor binary wave file format version {} "
                              "is not supported.".format(version))
        elif self.extension == 'pxp':
            assert type(path) is str, \
                "A colon-separated Igor-style path must be provided."
            if not self._filesystem:
                _, self._filesystem = pxp.load(str(self.filename))
            path = path.split(':')
            location = self._filesystem['root']
            for element in path:
                if element != 'root':
                    location = location[element.encode('utf8')]
            data = location.wave

        return self._wave_to_analogsignal(data['wave'], [])
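A hypothetical usage sketch for the reader above, assuming it is a method of a Neo-style IgorIO class (the file name and the colon-separated wave path are placeholders):

io = IgorIO(filename='experiment.pxp')
signal = io.read_analogsignal(path='root:folder0:wave0')
print(signal.sampling_rate, signal.units)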
Example #3
def loadAFM(path):
    data = igorbinary.load(path)
    note = data['wave']['note']

    noteData = {}

    for line in note.splitlines():
        line = line.replace(b' \xb0C', b'')
        line = line.replace(b'\xb0', b'')
        line = str(line, encoding='ascii')

        key, colon, value = line.partition(':')

        value = value.strip()

        try:
            value = float(value)
        except ValueError:
            if ',' in value:
                array = []
                for number in value.split(','):
                    try:
                        array.append(float(number))
                    except ValueError:
                        pass
                value = array

        noteData[key] = value

    data['wave']['note'] = noteData

    return data
Example #4
 def _run(self, args):
     wave = load(args.infile)
     numpy.savetxt(args.outfile, wave["wave"]["wData"], fmt="%g", delimiter="\t")
     self.plot_wave(args, wave)
     if args.verbose > 0:
         wave["wave"].pop("wData")
         pprint.pprint(wave)
Example #5
 def _run(self, args):
     self.wave = load(args.infile)  # wave contains the data: col 1 is index, col 2 is force
     if self.wave:
         self.wData = np.delete(self.wave['wave']['wData'], 2, 1)
     return
Example #6
    def read_analogsignal(self, path=None, lazy=False):
        assert not lazy, 'Do not support lazy'

        if not HAVE_IGOR:
            raise Exception(("`igor` package not installed. "
                             "Try `pip install igor`"))
        if self.extension == 'ibw':
            data = bw.load(self.filename)
            version = data['version']
            if version > 5:
                raise IOError(("Igor binary wave file format version {0} "
                               "is not supported.".format(version)))
        elif self.extension == 'pxp':
            assert type(path) is str, \
                "A colon-separated Igor-style path must be provided."
            _, filesystem = pxp.load(self.filename)
            path = path.split(':')
            location = filesystem['root']
            for element in path:
                if element != 'root':
                    location = location[element.encode('utf8')]
            data = location.wave
        content = data['wave']
        if "padding" in content:
            assert content['padding'].size == 0, \
                "Cannot handle non-empty padding"
        signal = content['wData']
        note = content['note']
        header = content['wave_header']
        name = str(header['bname'].decode('utf-8'))
        units = "".join([x.decode() for x in header['dataUnits']])
        try:
            time_units = "".join([x.decode() for x in header['xUnits']])
            assert len(time_units)
        except:
            time_units = "s"
        try:
            t_start = pq.Quantity(header['hsB'], time_units)
        except KeyError:
            t_start = pq.Quantity(header['sfB'][0], time_units)
        try:
            sampling_period = pq.Quantity(header['hsA'], time_units)
        except:
            sampling_period = pq.Quantity(header['sfA'][0], time_units)
        if self.parse_notes:
            try:
                annotations = self.parse_notes(note)
            except ValueError:
                warn("Couldn't parse notes field.")
                annotations = {'note': note}
        else:
            annotations = {'note': note}

        signal = AnalogSignal(signal, units=units, copy=False, t_start=t_start,
                              sampling_period=sampling_period, name=name,
                              file_origin=self.filename, **annotations)
        return signal
Example #8
 def _run(self, args):
     wave = load(args.infile)
     numpy.savetxt(args.outfile,
                   wave['wave']['wData'],
                   fmt='%g',
                   delimiter='\t')
     self.plot_wave(args, wave)
     if args.verbose > 0:
         wave['wave'].pop('wData')
         pprint.pprint(wave)
Example #9
def load_data(file_path):
    d = binarywave.load(file_path)
    dat = d['wave']['wData']
    if dat.shape[-1] not in [4, 6]:
        print("invalid file")
        return
    labels = [
        'Height', 'Amplitude 1', 'Amplitude 2', 'Phase 1', 'Phase 2',
        'Resonance Frequency'
    ] if dat.shape[-1] == 6 else ['Height', 'Amplitude', 'Phase', 'z sensor']
    return Panel(dat, minor_axis=labels).transpose(2, 0, 1).to_frame()
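pandas.Panel was removed in pandas 1.0, so the last line above only runs on older pandas. A hedged, approximate modern replacement (not a byte-for-byte reproduction of the Panel frame layout), reusing dat and labels from the body above:

import pandas as pd

frames = {label: pd.DataFrame(dat[..., i]) for i, label in enumerate(labels)}
df = pd.concat(frames, names=['channel'])  # long-format frame keyed by channel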
Example #10
    def load(cls, dirname, filename, IV, IF, time, features):
        path = os.path.join(dirname, filename)
        data = binarywave.load(path)['wave']['wData']
        time = np.linspace(0, time, num=data.size, endpoint=False)

        a, b, c, d, e, f = os.path.basename(filename)[:-4].split('_')
        fileinfo = Fileinfo(a, b, int(c), int(d), int(e), f)

        injection = _calculate_current(fileinfo, IV, IF)

        return cls(filename, fileinfo, injection, time, data, features)
Example #11
def load_data(file_path):
    d = binarywave.load(file_path)
    dat = d['wave']['wData']
    if dat.shape[-1] not in [4, 6]:
        print("invalid file")
        return
    labels = [
        'Height', 'Amplitude 1', 'Amplitude 2',
        'Phase 1', 'Phase 2', 'Resonance Frequency'
    ] if dat.shape[-1] == 6 else [
        'Height', 'Amplitude', 'Phase', 'z sensor'
    ]
    return Panel(dat, minor_axis=labels).transpose(2,0,1).to_frame()
Example #12
def load_IgorWave__(path):  # reads an Igor wave
    r = io.IgorIO(filename=path)
    data = bw.load(path)
    timestamp = int(data["wave"]["wave_header"]["modDate"])
    analogsignal_number = int(data["wave"]['wave_header']['nDim'][1])
    sampling__ = float(r.read_analogsignal().sampling_rate)
    sweep_position__ = int(r.filename.split('\\R')[1].split('_')[0])
    if analogsignal_number > 1:
        sweep_lag = np.array(str(data["wave"]['note']).split(
            '%SweepStartTimes^>')[1].split('|')[0:analogsignal_number],
                             dtype=np.float64)
    else:
        sweep_lag = [0]
    return r, sampling__, sweep_position__, timestamp, sweep_lag
Example #13
 def loadbinary(self, bpath):
     thewave=ibw.load(bpath)
     if thewave['wave']['wave_header']['nDim'][1:].sum() > 0:
         print("Error: "+os.path.split(bpath)[1]+" > one dimension")
     else:
         h=thewave['wave']['wave_header']
         self.wavename = h['bname']
         self.xdelta = h['sfA'][0]
         self.data = thewave['wave']['wData']
         self.pcsrB = self.data.size
         for string in h['dimUnits'][0]:
             self.xUnits += string.decode()
         for string in h['dataUnits']:
             self.dataUnits += string.decode()
Example #14
    def load_from_ibw(self, filename):
        """
        Load scan data from an IGOR binary wave file. Luckily someone has 
        already written an interface for this (the python `igor` package).
        """
        wave = binarywave.load(filename)['wave']
        data = np.array([wave['wData']])

        # The `header` contains some metadata
        header = wave['wave_header']
        nDim = header['nDim']
        steps = header['sfA']
        starts = header['sfB']

        # Construct the x and y scales from start, stop and n
        yscale = start_step_n(starts[0], steps[0], nDim[0])
        xscale = start_step_n(starts[1], steps[1], nDim[1])

        # Convert `note`, which is a bytestring of ASCII characters that 
        # contains some metadata, to a list of strings
        note = wave['note']
        note = note.decode('ASCII').split('\r')

        # Now the extraction fun begins. Most lines are of the form 
        # `Some-kind-of-name=some-value`
        metadata = dict()
        for line in note:
            # Split at '='. If it fails, we are not in a line that contains
            # useful information
            try:
                name, val = line.split('=')
            except ValueError:
                continue
            # Put the pair in a dictionary for later access
            metadata.update({name: val})
        
        # NOTE Unreliable hv
        hv = metadata['Excitation Energy']
        res = Namespace(
                data = data,
                xscale = xscale,
                yscale = yscale,
                zscale = None,
                angles = xscale,
                theta = 0,
                phi = 0,
                E_b = 0,
                hv = hv)
        return res
Example #15
def readibw(fdname, fname):
    # this code needs to live in a folder above the individual experiment folders
    # Get the current directory
    _thisDir = os.getcwd()
    # name of the folder & the file. e.g. fdname = "041917fly2cell1", # fname = "ch0_2.ibw"
    # directory of the file
    f_dir = os.path.join(_thisDir, fdname, 'waves', fname).replace("\\", "/")
    #print(f_dir)
    # load the wave
    wave = load(f_dir)
    v = wave['wave']['wData']  # voltage
    af = wave['wave']['wave_header']['sfA'][0]  # sampling interval in s, i.e. 1/(acquisition frequency)
    t = np.linspace(0, len(v) * af, len(v))  # generate time
    return (v, t)
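Usage sketch, using the folder and file names from the comments above:

v, t = readibw('041917fly2cell1', 'ch0_2.ibw')  # voltage trace and matching time base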
Example #16
    def load(cls, dirname, filename, IV, IF, endtime, features):
        path = os.path.join(dirname, filename)
        dat = binarywave.load(path)
        data = dat['wave']['wData']
        if dat['version'] == 2:
            dt = dat['wave']['wave_header']['hsA']
        elif dat['version'] == 5:
            dt = dat['wave']['wave_header']['sfA'][0]
        numpts = dat['wave']['wave_header']['npnts']
        tot_time = dt * numpts
        #time = np.linspace(0, endtime, num=data.size, endpoint=False)
        time = np.linspace(0, tot_time, num=numpts, endpoint=False)
        #optionally shorten the data
        #if endtime<tot_time:
        #    end_index=np.abs(time-endtime).argmin()
        #    data=data[0:end_index]
        #    time=time[0:end_index]

        a, b, c, d, e, f = os.path.basename(filename)[:-4].split('_')
        fileinfo = Fileinfo(a, b, int(c), int(d), int(e), f)

        injection = _calculate_current(fileinfo, IV, IF)

        return cls(filename, fileinfo, injection, time, data, features)
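The version-dependent header lookup above is a recurring pattern; a small hedged helper that isolates the same version-2 vs. version-5 logic:

def sampling_interval(dat):
    # dat is the dict returned by binarywave.load; the sample spacing lives
    # in 'hsA' for version-2 files and in 'sfA'[0] for version-5 files
    header = dat['wave']['wave_header']
    if dat['version'] == 2:
        return header['hsA']
    return header['sfA'][0]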
Example #17
    def load(cls, dirname, filename, IV, IF, params, IVseries, time):
        path = os.path.join(dirname, filename)
        wave = binarywave.load(path)['wave']
        data = wave['wData']
        dtfile = wave['wave_header']['hsA']
        time = np.arange(len(data)) * dtfile

        #what does basename do: it just returns the filename
        #why is this fileinfo important - allows us to refer to aspects of the filename
        a, b, c, d, e, f, g = os.path.basename(filename)[:-8].split('_')
        fileinfo = Fileinfo(a + b + c, d, int(e), int(f), int(g))

        injection = _calculate_current(fileinfo, IV, IF, IVseries)

        return cls(filename, fileinfo, injection, time, data, params)
Example #18
    def read_analogsignal(self, lazy=False, cascade=True):
        if not HAVE_IGOR:
            raise Exception(
                "igor package not installed. Try `pip install igor`")
        data = bw.load(self.filename)
        version = data['version']
        if version > 3:
            raise IOError(
                "Igor binary wave file format version {0} is not supported.".
                format(version))
        content = data['wave']
        if "padding" in content:
            assert content[
                'padding'].size == 0, "Cannot handle non-empty padding"
        if lazy:
            # not really lazy, since the `igor` module loads the data anyway
            signal = np.array((), dtype=content['wData'].dtype)
        else:
            signal = content['wData']
        note = content['note']
        header = content['wave_header']
        name = header['bname']
        assert header['botFullScale'] == 0
        assert header['topFullScale'] == 0
        units = "".join(header['dataUnits'])
        time_units = "".join(header['xUnits']) or "s"
        t_start = pq.Quantity(header['hsB'], time_units)
        sampling_period = pq.Quantity(header['hsA'], time_units)
        if self.parse_notes:
            try:
                annotations = self.parse_notes(note)
            except ValueError:
                warn("Couldn't parse notes field.")
                annotations = {'note': note}
        else:
            annotations = {'note': note}

        signal = AnalogSignal(signal,
                              units=units,
                              copy=False,
                              t_start=t_start,
                              sampling_period=sampling_period,
                              name=name,
                              file_origin=self.filename,
                              **annotations)
        if lazy:
            signal.lazy_shape = content['wData'].shape
        return signal
Example #19
def load_and_prep( ibw_location ):
    '''Loads an ibw file
    
    Given the location of an ibw file, this function loads the file and cleans
    the data as specified in 'cleanup_ibw_labels()' and 'cleanup_note().' The
    returned ibw is stored as a dictionary.'''
    
    #Code from the igor package loads the .ibw file
    loadedibw = igoribw.load(ibw_location)
    
    #the labels are rearranged to be more amenable to our analysis
    cleanup_ibw_labels( loadedibw )
    
    #The note, which stores all of the metadata, is rearranged to be more searchable
    cleanup_note( loadedibw )
    
    return loadedibw
Example #20
def exper_spikes(wavedir, expername, TBS_ending, axes, fig, stim_times):
    FileDir = wavedir + expername + TBS_ending
    TBSfiles = glob.glob(FileDir)
    print(expername, "NUM FILES:", len(TBSfiles))
    if (len(TBSfiles) == 0):
        print("No files in:", FileDir)
        return 0, 0
    else:
        TBSfiles = sorted(TBSfiles, key=sortorder)
        numspikes = np.zeros(len(TBSfiles))
        peaktime = []
        #read in data file
        for i, filename in enumerate(TBSfiles):
            #read in data
            data = binarywave.load(filename)
            trace = data['wave']['wData']
            dt = data['wave']['wave_header']['hsA']
            stim_index = [int(round(st / dt)) for st in stim_times]
            npnts = data['wave']['wave_header']['npnts']
            endtime = len(trace) * dt
            tracetime = np.arange(0, endtime, dt)  # time points from 0 to endtime stepping by dt
            #
            #find peaks and peaktime using modification of Zbyszek's find_spikes
            peaks, thresholds = find_spikes([tracetime, trace])
            peaktime.append(tracetime[peaks])
            numspikes[i] = len(peaks)
            #plot trace and peaks to verify correct identification
            axes[i // num_windows].plot(tracetime, trace)
            for peak in peaks:
                if peak in stim_index:
                    axes[i // num_windows].plot(
                        tracetime[peak],
                        trace[peak],
                        '^',
                        label=filename.split('/')[-1].split('_')[5:7])
                    print(filename.split('/')[-1], "peaks:", peaks,
                          "check:", tracetime[peak])
                    axes[i // num_windows].legend(fontsize='xx-small')
                else:
                    axes[i // num_windows].plot(tracetime[peak], trace[peak],
                                                '*r')
        fig.canvas.draw()
        pyplot.show()
        return peaktime, numspikes
Example #21
def load_data(file_path):
    d = binarywave.load(file_path)
    dat = d["wave"]["wData"]
    if dat.shape[-1] not in [4, 6]:
        print ("invalid file")
        return
    labels = (
        [
            "Height",
            "Amplitude 1",
            "Amplitude 2",
            "Phase 1",
            "Phase 2",
            "Resonance Frequency",
        ]
        if dat.shape[-1] == 6
        else ["Height", "Amplitude", "Phase", "z sensor"]
    )
    return Panel(dat, minor_axis=labels).transpose(2, 0, 1).to_frame()
Example #22
def load_specs_ibw(filename):
    dat = igor.load(filename)
    mat = dat['wave']['wData']
    wavenote = load_wave_note(dat)
    ke = float(wavenote['Kinetic Energy'])
    pe = float(wavenote['Pass Energy'])
    e_delta = (0.000060241 - 0.00000030146 * pe) * pe * (1920 / mat.shape[0])
    e_offset = ke - e_delta * mat.shape[0] * (937 / 1920)
    a_offset = -16.430
    a_delta = 0.0255102 * (1200 / mat.shape[1])
    energy = e_offset + e_delta * np.arange(mat.shape[0])
    angle = a_offset + a_delta * np.arange(mat.shape[1])
    return xr.DataArray(mat,
                        coords={
                            'energy': energy,
                            'slit': angle
                        },
                        dims=('energy', 'slit'),
                        attrs=wavenote)
Example #23
def load_wave(file_name):
    '''
    trace, sr, stim = load_wave(file_name)
        Load trace from an igor data file, as well as sampling rate and stimulation
        amplitude.
    parameters:
        file_name (string) - path of the igor data file
    return:
        trace (array_like) - data trace in the file, return as numpy array
        sr (float) - sampling rate
        stim (array_like)- stimulation step properties in an array, including start time,
            duration and amplitude
    '''

    try:
        sr, stim_amp, stim_dur, stim_start = 10000, 0, 0, 0
        data = binarywave.load(file_name)
        trace = data['wave']['wData']
        # Search for sampling rate
        searched = re.search(r'XDelta\(s\):(.*?);',
                             data['wave']['note'].decode())
        if searched is not None:
            sr = 1 / float(searched.group(1))
        # Search for stimulation amplitude
        searched = re.search(r';Stim Amp.:(.+?);',
                             data['wave']['note'].decode())
        if searched is not None:
            stim_amp = float(searched.group(1))
        # Search for stimulation duration
        searched = re.search(r';Width:(.+?);', data['wave']['note'].decode())
        if searched is not None:
            stim_dur = float(searched.group(1))
        # Search for stimulation start
        searched = re.search(r';StepStart\(s\):(.+?);',
                             data['wave']['note'].decode())
        if searched is not None:
            stim_start = float(searched.group(1))
        return (trace, sr, [stim_start, stim_dur, stim_amp])
    except IOError:
        print('Igor wave file (' + file_name + ') reading error')
        raise
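The regexes above all target the same ';'-separated 'key:value' layout of the note field, so the note can also be parsed generically. A sketch under that assumption (the file name is a placeholder, and the keys must actually be present):

from igor import binarywave

note = binarywave.load('cell1.ibw')['wave']['note'].decode()
fields = {}
for item in note.split(';'):
    key, sep, value = item.partition(':')
    if sep:
        fields[key.strip()] = value.strip()
sr = 1 / float(fields['XDelta(s)'])  # sampling rate, as in load_wave() above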
Example #24
def load_ibw(path, flatten=True):
    """
    Given a path to an Igor Binary Wave, return the image file as a 3 dimensional 
    numpy array.
    
    Input:
        path: string file path to .ibw file
        flatten (optional): boolean input to flatten topography data.
    Output:
        data: 3 dimensional numpy array containing .ibw data.
    """
    data = load(path)['wave']['wData']

    # Flatten the topography data by extracting any linear response.
    if flatten:
        flat_topo = data.copy()
        flat_topo[:, :, 0] = detrend(flat_topo[:, :, 0])
        data = flat_topo
        
    data = np.rot90(data)    
    return data
Example #25
def read_from_handle(f):
    """Reads Igor's (WaveMetrics) binary wave format, .ibw or .bwav, files.

    Args:
        f(file): file handle

    Returns:
        A tuple of (headerType instance, numpy vector) where `headerType
        instance` contains a meta info about the wave and `numpy vector`
        contains wave data. `numpy vector` is writeable.
    """

    data = binarywave.load(f)
    version = data['version']
    assert version in (2, 3, 5), \
        "File version is '" + str(version) + "', not supported"

    content = data['wave']
    wdata = np.copy(content['wData'])
    header = IgorHeader(version, content)
    return header, wdata
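Usage sketch for the handle-based reader above; binarywave.load accepts an open binary file handle as well as a path (the file name is a placeholder):

with open('wave.ibw', 'rb') as f:
    header, wdata = read_from_handle(f)
print(wdata.shape)  # wdata is a writeable copy of the samples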
Example #26
    def read_analogsignal(self, lazy=False, cascade=True):
        if not HAVE_IGOR:
            raise Exception("igor package not installed. Try `pip install igor`")
        data = bw.load(self.filename)
        version = data['version']
        if version > 3:
            raise IOError("Igor binary wave file format version {0} is not supported.".format(version))
        content = data['wave']
        if "padding" in content:
            assert content['padding'].size == 0, "Cannot handle non-empty padding"
        if lazy:
            # not really lazy, since the `igor` module loads the data anyway
            signal = np.array((), dtype=content['wData'].dtype)
        else:
            signal = content['wData']
        note = content['note']
        header = content['wave_header']
        name = header['bname']
        assert header['botFullScale'] == 0
        assert header['topFullScale'] == 0
        units = "".join(header['dataUnits'])
        time_units = "".join(header['xUnits']) or "s"
        t_start = pq.Quantity(header['hsB'], time_units)
        sampling_period = pq.Quantity(header['hsA'], time_units)
        if self.parse_notes:
            try:
                annotations = self.parse_notes(note)
            except ValueError:
                warn("Couldn't parse notes field.")
                annotations = {'note': note}
        else:
            annotations = {'note': note}

        signal = AnalogSignal(signal, units=units, copy=False, t_start=t_start,
                              sampling_period=sampling_period, name=name,
                              file_origin=self.filename, **annotations)
        if lazy:
            signal.lazy_shape = content['wData'].shape
        return signal
Example #27
# -*- coding: utf-8 -*-
"""
This code is written to generate the Chlorophyll colormap.
Therefore, instead of importing it, it should just be run.
I'm looking into a more proper way to do this.

Created on Mon Aug 13 2018

@author: joe
"""

import igor.binarywave as igoribw
import numpy as np
from matplotlib.colors import LinearSegmentedColormap
import matplotlib.pyplot as plt

chl = igoribw.load('Chlorophyll.ibw')
colordata = np.transpose(chl['wave']['wData'])
chlor = {}
L = len(np.transpose(colordata))
chlor['red'] = [(i / (L - 1), colordata[0][i] / 65533, colordata[0][i] / 65533)
                for i, x in enumerate(colordata[0])]
chlor['green'] = [(i / (L - 1), colordata[1][i] / 65533,
                   colordata[1][i] / 65533)
                  for i, x in enumerate(colordata[0])]
chlor['blue'] = [(i / (L - 1), colordata[2][i] / 65533,
                  colordata[2][i] / 65533) for i, x in enumerate(colordata[0])]
chlorophyll = LinearSegmentedColormap('Chlorophyll', chlor)
plt.register_cmap(cmap=chlorophyll)
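Once registered, the colormap is available to matplotlib by name; a short usage sketch continuing the script above:

plt.imshow(np.random.rand(32, 32), cmap='Chlorophyll')  # placeholder data
plt.colorbar()
plt.show()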
Example #28
        tracestring = []
        #-0.5e9 converts to nA and leak due to 5 mV (not 10 mV) Vm change
        RampLeak = zeros(numtraces)

        fig = pyplot.figure(figsize=(6, 6))
        fig.canvas.set_window_title('Experiment ' + ExperName)
        axes = fig.add_subplot(111)

        #loop through each trace from each series

        for fileindex, eachfile in enumerate(filenames):
            parts = eachfile.split('_')
            tracestring.append(ExperName + '_' + parts[-3] + "_" + parts[-2] + "      ")
            # extra spaces force column_stack to use more significant figures
            # when converting floats to strings; there should be a better way
            data = binarywave.load(eachfile)  #read in data
            trace = data['wave']['wData']
            LeakVsTime = zeros(len(trace))
            #pp(data)             # optional - look at data array to see what else is there
            dtfile = data['wave']['wave_header']['hsA']  #verify that the dt we have is correct
            if (dt != dtfile):
                print("Error, dt of file is different than expected")
            #calculate various measures
            Ihold[fileindex] = mean(trace[basestartpnt:baseEndpnt])
            IMin[fileindex] = min(trace[PeakStartpnt:PeakEndpnt])
            mintimePt = trace[PeakStartpnt:PeakEndpnt].argmin() + PeakStartpnt
            mintime = mintimePt * dt
            maxtimePt = trace[PeakStartpnt:maxEndpt].argmax() + PeakStartpnt
            maxtime = maxtimePt * dt
            IMax[fileindex] = mean(trace[maxtimePt - 10:maxtimePt + 10])
Example #29
    def translate(self, file_path, verbose=False, parm_encoding='utf-8'):
        """
        Translates the provided file to .h5

        Parameters
        ----------
        file_path : String / unicode
            Absolute path of the .ibw file
        verbose : Boolean (Optional)
            Whether or not to show  print statements for debugging
        parm_encoding : str, optional
            Codec to be used to decode the bytestrings into Python strings if needed.
            Default 'utf-8'

        Returns
        -------
        h5_path : String / unicode
            Absolute path of the .h5 file
        """
        file_path = path.abspath(file_path)
        # Prepare the .h5 file:
        folder_path, base_name = path.split(file_path)
        base_name = base_name[:-4]
        h5_path = path.join(folder_path, base_name + '.h5')
        if path.exists(h5_path):
            remove(h5_path)

        h5_file = h5py.File(h5_path, 'w')

        # Load the ibw file first
        ibw_obj = bw.load(file_path)
        ibw_wave = ibw_obj.get('wave')
        parm_dict = self._read_parms(ibw_wave, parm_encoding)
        chan_labels, chan_units = self._get_chan_labels(ibw_wave, parm_encoding)

        if verbose:
            print('Channels and units found:')
            print(chan_labels)
            print(chan_units)

        # Get the data to figure out if this is an image or a force curve
        images = ibw_wave.get('wData')

        if images.shape[2] != len(chan_labels):
            chan_labels = chan_labels[1:]  # for layer 0 null set errors in older AR software

        if images.ndim == 3:  # Image stack
            if verbose:
                print('Found image stack of size {}'.format(images.shape))
            type_suffix = 'Image'

            num_rows = parm_dict['ScanLines']
            num_cols = parm_dict['ScanPoints']

            images = images.transpose(2, 1, 0)  # now ordered as [chan, Y, X] image
            images = np.reshape(images, (images.shape[0], -1, 1))  # 3D [chan, Y*X points,1]

            pos_desc = [Dimension('X', 'm', np.linspace(0, parm_dict['FastScanSize'], num_cols)),
                        Dimension('Y', 'm', np.linspace(0, parm_dict['SlowScanSize'], num_rows))]

            spec_desc = Dimension('arb', 'a.u.', [1])

        else:  # single force curve
            if verbose:
                print('Found force curve of size {}'.format(images.shape))

            type_suffix = 'ForceCurve'
            images = np.atleast_3d(images)  # now [Z, chan, 1]
            images = images.transpose((1, 2, 0))  # [chan ,1, Z] force curve

            # The data generated above varies linearly. Override.
            # For now, we'll shove the Z sensor data into the spectroscopic values.

            # Find the channel that corresponds to either Z sensor or Raw:
            try:
                chan_ind = chan_labels.index('ZSnsr')
                spec_data = np.atleast_2d(VALUES_DTYPE(images[chan_ind]))
            except ValueError:
                try:
                    chan_ind = chan_labels.index('Raw')
                    spec_data = np.atleast_2d(VALUES_DTYPE(images[chan_ind]))
                except ValueError:
                    # We don't expect to come here. If we do, spectroscopic values remains as is
                    spec_data = np.arange(images.shape[2])

            pos_desc = Dimension('X', 'm', [1])
            spec_desc = Dimension('Z', 'm', spec_data)

        # Create measurement group
        meas_grp = create_indexed_group(h5_file, 'Measurement')

        # Write file and measurement level parameters
        global_parms = generate_dummy_main_parms()
        global_parms['data_type'] = 'IgorIBW_' + type_suffix
        global_parms['translator'] = 'IgorIBW'
        write_simple_attrs(h5_file, global_parms)

        write_simple_attrs(meas_grp, parm_dict)

        # Create Position and spectroscopic datasets
        h5_pos_inds, h5_pos_vals = write_ind_val_dsets(meas_grp, pos_desc, is_spectral=False)
        h5_spec_inds, h5_spec_vals = write_ind_val_dsets(meas_grp, spec_desc, is_spectral=True)

        # Prepare the list of raw_data datasets
        for chan_data, chan_name, chan_unit in zip(images, chan_labels, chan_units):
            chan_grp = create_indexed_group(meas_grp, 'Channel')

            write_main_dataset(chan_grp, np.atleast_2d(chan_data), 'Raw_Data',
                               chan_name, chan_unit,
                               None, None,
                               h5_pos_inds=h5_pos_inds, h5_pos_vals=h5_pos_vals,
                               h5_spec_inds=h5_spec_inds, h5_spec_vals=h5_spec_vals,
                               dtype=np.float32)

        if verbose:
            print('Finished preparing raw datasets')

        h5_file.close()
        return h5_path
Example #30
    def ibwToNx(self, inputfile, entryId=None, rotation=None):
        filecontent = igorbw.load(inputfile)
        filename = os.path.basename(inputfile).split(".")[0]
        header = {}
        entryname = None
        wave = []
        axes = []

        print("---- Start: ibw->nexus ----")
        print("Parsing: " + filename)

        # Loop through and find header data
        entryname = str(copy(filecontent["wave"]["wave_header"]["bname"]))
        wave = np.asarray(copy(filecontent["wave"]["wData"]))

        note = str(copy(filecontent["wave"]["note"]))
        noteList = [x.split("=") for x in note.split("\r")]
        noteList = [x for x in noteList if len(x) == 2]
        noteDict = dict(noteList)

        # Check if file is loaded correctly
        if entryname and len(wave.shape) == 2:
            print("File seems valid")
        else:
            print("File not valid")
            return -1

        axesSteps = filecontent["wave"]["wave_header"]["sfA"]
        axesInit = filecontent["wave"]["wave_header"]["sfB"]
        axesDim = filecontent["wave"]["wave_header"]["nDim"]
        axesUnits = filecontent["wave"]["wave_header"]["dimUnits"]

        axes = [
            np.linspace(axesInit[0], axesDim[0] * axesSteps[0] + axesInit[0], axesDim[0]),
            np.linspace(axesInit[1], axesDim[1] * axesSteps[1] + axesInit[1], axesDim[1]),
        ]
        units = ["".join(axesUnits[0]), "".join(axesUnits[1])]

        print("Create nexus format")
        # Nexus format
        if entryId is None:
            entry = nxtemplate.arpes("entry1")
        else:
            entry = nxtemplate.arpes(entryId)

        entry.title = entryname
        # Meta data
        if "Ep" in noteDict:
            entry.instrument.analyser.pass_energy = noteDict["Ep"]
            entry.instrument.analyser.pass_energy.units = "eV"
        if "LensMode" in noteDict:
            entry.instrument.analyser.lens_mode = noteDict["LensMode"].strip("\x00")
        if "Ek" in noteDict:
            entry.instrument.analyser.kinetic_energy = noteDict["Ek"]
            entry.instrument.analyser.kinetic_energy.units = "eV"
        entry.instrument.manipulator.rangle = rotation
        entry.instrument.manipulator.rangle.units = "deg"

        if int(wave.shape[0]) == len(axes[0]) and int(wave.shape[1]) == len(axes[1]):
            wave = wave.transpose()

            # Data
        data = nx.NXfield(wave, name="data")
        energies = nx.NXfield(axes[0], units=units[0], name="energies")
        angles = nx.NXfield(axes[1], units=units[1], name="angles")
        entry.analyser = nx.NXdata(data, (angles, energies))

        print("Done with " + entryname)
        self.nxEntry = entry
        print("---- End: ibw->nexus ------")
        return self
Example #31
    def translate(self, file_path, verbose=False, parm_encoding='utf-8', ftype='FF',
                  subfolder='Measurement_000', h5_path='', channel_label_name=True):
        """
        Translates the provided file to .h5
        Adapted heavily from pycroscopy IBW file, modified to work with Ginger format

        :param file_path: Absolute path of the .ibw file
        :type file_path: String / unicode
        
        :param verbose: Whether or not to show  print statements for debugging
        :type verbose: boolean, optional
        
        :param parm_encoding: Codec to be used to decode the bytestrings into Python strings if needed.
            Default 'utf-8'
        :type parm_encoding: str, optional
            
        :param ftype: Delineates Ginger Lab imaging file type to be imported (not case-sensitive)
            'FF' : FF-trEFM
            'SKPM' : FM-SKPM
            'ringdown' : Ringdown
            'trEFM' : normal trEFM
        :type ftype: str, optional
        
        :param subfolder: Specifies folder under root (/) to save data in. Default is standard pycroscopy format
        :type subfolder: str, optional
        
        :param h5_path: Existing H5 file to append to
        :type h5_path: str, optional
        
        :param channel_label_name: If True, uses the Channel as the subfolder name (e.g. Height, Phase, Amplitude, Charging)
        :type channel_label_name: bool, optional
        
        :returns: Absolute path of the .h5 file
        :rtype: String / unicode
            
        """

        # Prepare the .h5 file:
        if not any(h5_path):
            folder_path, base_name = path.split(file_path)
            base_name = base_name[:-4]
            h5_path = path.join(folder_path, base_name + '.h5')
            # hard-coded exception, rarely occurs but can be useful
            if path.exists(h5_path):
                h5_path = path.join(folder_path, base_name + '_00.h5')

        h5_file = h5py.File(h5_path, 'w')

        # If subfolder improperly formatted
        if subfolder == '':
            subfolder = '/'

        # Load the ibw file first
        ibw_obj = bw.load(file_path)
        ibw_wave = ibw_obj.get('wave')
        parm_dict = self._read_parms(ibw_wave, parm_encoding)
        chan_labels, chan_units = self._get_chan_labels(ibw_wave, parm_encoding)
        if verbose:
            print('Channels and units found:')
            print(chan_labels)
            print(chan_units)

        # Get the data to figure out if this is an image or a force curve
        images = ibw_wave.get('wData')

        if images.shape[2] != len(chan_labels):
            chan_labels = chan_labels[1:]  # for weird null set errors in older AR software

        # Check if a Ginger Lab format ibw (has 'UserIn' in channel labels)
        _is_gl_type = any(['UserIn0' in str(s) for s in chan_labels])
        if _is_gl_type:
            chan_labels = self._get_image_type(chan_labels, ftype)

        if verbose:
            print('Processing image type', ftype, 'with channels', chan_labels)

        type_suffix = 'Image'

        num_rows = ibw_wave['wave_header']['nDim'][1]  # lines
        num_cols = ibw_wave['wave_header']['nDim'][0]  # points
        num_imgs = ibw_wave['wave_header']['nDim'][2]  # layers
        unit_scale = self._get_unit_factor(''.join([str(s)[-2] for s in ibw_wave['wave_header']['dimUnits'][0][0:2]]))
        data_scale = self._get_unit_factor(str(ibw_wave['wave_header']['dataUnits'][0])[-2])

        parm_dict['FastScanSize'] = unit_scale * num_cols * ibw_wave['wave_header']['sfA'][0]
        parm_dict['SlowScanSize'] = unit_scale * num_rows * ibw_wave['wave_header']['sfA'][1]

        images = images.transpose(2, 0, 1)  # now ordered as [chan, Y, X] image
        images = np.reshape(images, (images.shape[0], -1, 1))  # 3D [chan, Y*X points,1]

        pos_desc = [Dimension(name='X', units='m', values=np.linspace(0, parm_dict['FastScanSize'], num_cols)),
                    Dimension(name='Y', units='m', values=np.linspace(0, parm_dict['SlowScanSize'], num_rows))]
        spec_desc = [Dimension(name='arb', units='a.u.', values=[1])]

        # Create Position and spectroscopic datasets
        h5_pos_inds, h5_pos_vals = write_ind_val_dsets(h5_file['/'], pos_desc, is_spectral=False)
        h5_spec_inds, h5_spec_vals = write_ind_val_dsets(h5_file['/'], spec_desc, is_spectral=True)

        # Prepare the list of raw_data datasets
        for chan_data, chan_name, chan_unit in zip(images, chan_labels, chan_units):
            chan_grp = create_indexed_group(h5_file['/'], chan_name)
            write_main_dataset(chan_grp, np.atleast_2d(chan_data), 'Raw_Data',
                               chan_name, chan_unit,
                               pos_desc, spec_desc,
                               dtype=np.float32)

        if verbose:
            print('Finished writing all channels')

        h5_file.close()
        return h5_path
Example #32
def transfer_function(h5_file,
                      tf_file='',
                      params_file='',
                      psd_freq=1e6,
                      offset=0.0016,
                      sample_freq=10e6,
                      plot=False):
    '''
	Reads in the transfer function .ibw, then creates two datasets within
	a parent folder 'Transfer_Function'
	
	This will destructively overwrite an existing Transfer Function in there
	
	1) TF (transfer function)
	2) Freq (frequency axis for computing Fourier Transforms)
	
	:param h5_file:
	:type h5_file:
	
	:param tf_file: Transfer Function .ibw File
	:type tf_file: ibw
		
	:param params_file: The filepath in string format for the parameters file containing
			Q, AMPINVOLS, etc.
	:type params_file: string
		
	:param psd_freq: The maximum range of the Power Spectral Density.
		For Asylum Thermal Tunes, this is often 1 MHz on MFPs and 2 MHz on Cyphers
	:type psd_freq: float
		
	:param offset: To avoid divide-by-zero effects since we will divide by the transfer function
			when generating GKPFM data
	:type offset: float
		
	:param sample_freq: The desired output sampling. This should match your data.   
	:type sample_freq: float
		
	:param plot:
	:type plot: bool, optional
	
	:returns: the Transfer Function group
	:rtype:
	'''
    if not any(tf_file):
        tf_file = usid.io_utils.file_dialog(
            caption='Select Transfer Function file ',
            file_filter='IBW Files (*.ibw)')
    data = bw.load(tf_file)
    tf = data.get('wave').get('wData')

    if 'Transfer_Function' in h5_file:
        del h5_file['/Transfer_Function']
    h5_file.create_group('Transfer_Function')
    h5_file['Transfer_Function'].create_dataset('TF', data=tf)

    freq = np.linspace(0, psd_freq, len(tf))
    h5_file['Transfer_Function'].create_dataset('Freq', data=freq)

    parms = params_list(params_file, psd_freq=psd_freq)

    for k in parms:
        h5_file['Transfer_Function'].attrs[k] = float(parms[k])

    tfnorm = float(parms['Q']) * (tf - np.min(tf)) / (np.max(tf) - np.min(tf))
    tfnorm += offset
    h5_file['Transfer_Function'].create_dataset('TFnorm', data=tfnorm)

    TFN_RS, FQ_RS = resample_tf(h5_file,
                                psd_freq=psd_freq,
                                sample_freq=sample_freq)
    TFN_RS = float(parms['Q']) * (TFN_RS - np.min(TFN_RS)) / (np.max(TFN_RS) -
                                                              np.min(TFN_RS))
    TFN_RS += offset

    h5_file['Transfer_Function'].create_dataset('TFnorm_resampled',
                                                data=TFN_RS)
    h5_file['Transfer_Function'].create_dataset('Freq_resampled', data=FQ_RS)

    if plot:
        plt.figure()
        plt.plot(freq, tfnorm, 'b')
        plt.plot(FQ_RS, TFN_RS, 'r')
        plt.xlabel('Frequency (Hz)')
        plt.ylabel('Amplitude (m)')
        plt.yscale('log')
        plt.title('Transfer Function')

    return h5_file['Transfer_Function']
Example #33
tracestring = []

fig = pyplot.figure(figsize=(6, 6))
fig.canvas.set_window_title('Experiment ' + args.experiment)
axes = fig.add_subplot(111)

#loop through each trace from each series
tempRMP = []
tempAccess = []
goodtraces = 0
badcount = 0
reallybad = 0
baselineVm = 0
for i, filename in enumerate(filenames):
    data = binarywave.load(filename)
    trace = data['wave']['wData']
    Vm = np.mean(trace[basestartpnt:baseendpnt])
    tempRMP.append(Vm)
    access = (tempRMP[i] - np.mean(trace[hyperstartpnt:hyperendpnt])) / Iaccess / 1e6  # converts access units to megaohms
    tempAccess.append(access)

    if (i == 9):
        baselineVm = np.mean(tempRMP)  #this is mean over traces
        baselineAccess = np.mean(tempAccess)

    if i >= 10:
        if (abs(Vm - baselineVm) / abs(baselineVm)) > 0.2 or (
                abs(access - baselineAccess) / abs(baselineAccess)
        ) > 0.4:  # 20% baseline change or 40% access change
            pass  # body truncated in the original snippet
Example #34
def get_header_dict(filename):
    return binarywave.load(filename)["wave"]["wave_header"]
Example #35
    def translate(self, file_path, verbose=False, parm_encoding='utf-8'):
        """
        Translates the provided file to .h5

        Parameters
        ----------
        file_path : String / unicode
            Absolute path of the .ibw file
        verbose : Boolean (Optional)
            Whether or not to show  print statements for debugging
        parm_encoding : str, optional
            Codec to be used to decode the bytestrings into Python strings if needed.
            Default 'utf-8'

        Returns
        -------
        h5_path : String / unicode
            Absolute path of the .h5 file
        """

        # Load the ibw file first
        ibw_obj = bw.load(file_path)
        ibw_wave = ibw_obj.get('wave')
        parm_dict = self._read_parms(ibw_wave, parm_encoding)
        chan_labels, chan_units = self._get_chan_labels(
            ibw_wave, parm_encoding)
        if verbose:
            print('Channels and units found:')
            print(chan_labels)
            print(chan_units)

        # Get the data to figure out if this is an image or a force curve
        images = ibw_wave.get('wData')
        if images.ndim == 3:  # Image stack
            if verbose:
                print('Found image stack of size {}'.format(images.shape))
            type_suffix = 'Image'

            num_rows = parm_dict['ScanLines']
            num_cols = parm_dict['ScanPoints']

            images = images.transpose(2, 0,
                                      1)  # now ordered as [chan, Y, X] image
            images = np.reshape(
                images, (images.shape[0], -1, 1))  # 3D [chan, Y*X points,1]

            ds_pos_ind, ds_pos_val = build_ind_val_dsets(
                [num_cols, num_rows],
                is_spectral=False,
                steps=[
                    1.0 * parm_dict['FastScanSize'] / num_cols,
                    1.0 * parm_dict['SlowScanSize'] / num_rows
                ],
                labels=['X', 'Y'],
                units=['m', 'm'],
                verbose=verbose)

            ds_spec_inds, ds_spec_vals = build_ind_val_dsets([1],
                                                             is_spectral=True,
                                                             steps=[1],
                                                             labels=['arb'],
                                                             units=['a.u.'],
                                                             verbose=verbose)

        else:  # single force curve
            if verbose:
                print('Found force curve of size {}'.format(images.shape))

            type_suffix = 'ForceCurve'
            images = np.atleast_3d(images)  # now [Z, chan, 1]
            images = images.transpose((1, 2, 0))  # [chan ,1, Z] force curve

            ds_pos_ind, ds_pos_val = build_ind_val_dsets([1],
                                                         is_spectral=False,
                                                         steps=[25E-9],
                                                         labels=['X'],
                                                         units=['m'],
                                                         verbose=verbose)

            ds_spec_inds, ds_spec_vals = build_ind_val_dsets([images.shape[2]],
                                                             is_spectral=True,
                                                             labels=['Z'],
                                                             units=['m'],
                                                             verbose=verbose)
            # The data generated above varies linearly. Override.
            # For now, we'll shove the Z sensor data into the spectroscopic values.

            # Find the channel that corresponds to either Z sensor or Raw:
            try:
                chan_ind = chan_labels.index('ZSnsr')
                ds_spec_vals.data = np.atleast_2d(np.float32(images[chan_ind]))
            except ValueError:
                try:
                    chan_ind = chan_labels.index('Raw')
                    ds_spec_vals.data = np.atleast_2d(
                        np.float32(images[chan_ind]))
                except ValueError:
                    # We don't expect to come here. If we do, spectroscopic values remains as is
                    pass

        # Prepare the list of raw_data datasets
        chan_raw_dsets = list()
        for chan_data, chan_name, chan_unit in zip(images, chan_labels,
                                                   chan_units):
            ds_raw_data = MicroDataset('Raw_Data',
                                       data=np.atleast_2d(chan_data),
                                       dtype=np.float32,
                                       compression='gzip')
            ds_raw_data.attrs['quantity'] = chan_name
            ds_raw_data.attrs['units'] = [chan_unit]
            chan_raw_dsets.append(ds_raw_data)
        if verbose:
            print('Finished preparing raw datasets')

        # Prepare the tree structure
        # technically should change the date, etc.
        spm_data = MicroDataGroup('')
        global_parms = generate_dummy_main_parms()
        global_parms['data_type'] = 'IgorIBW_' + type_suffix
        global_parms['translator'] = 'IgorIBW'
        spm_data.attrs = global_parms
        meas_grp = MicroDataGroup('Measurement_000')
        meas_grp.attrs = parm_dict
        spm_data.addChildren([meas_grp])

        if verbose:
            print('Finished preparing tree trunk')

        # Prepare the .h5 file:
        folder_path, base_name = path.split(file_path)
        base_name = base_name[:-4]
        h5_path = path.join(folder_path, base_name + '.h5')
        if path.exists(h5_path):
            remove(h5_path)

        # Write head of tree to file:
        hdf = ioHDF5(h5_path)
        # spm_data.showTree()
        hdf.writeData(spm_data, print_log=verbose)

        if verbose:
            print('Finished writing tree trunk')

        # Standard list of auxiliary datasets that get linked with the raw dataset:
        aux_ds_names = [
            'Position_Indices', 'Position_Values', 'Spectroscopic_Indices',
            'Spectroscopic_Values'
        ]

        # Create Channels, populate and then link:
        for chan_index, raw_dset in enumerate(chan_raw_dsets):
            chan_grp = MicroDataGroup(
                '{:s}{:03d}'.format('Channel_', chan_index),
                '/Measurement_000/')
            chan_grp.attrs['name'] = raw_dset.attrs['quantity']
            chan_grp.addChildren(
                [ds_pos_ind, ds_pos_val, ds_spec_inds, ds_spec_vals, raw_dset])
            h5_refs = hdf.writeData(chan_grp, print_log=verbose)
            h5_raw = getH5DsetRefs(['Raw_Data'], h5_refs)[0]
            linkRefs(h5_raw, getH5DsetRefs(aux_ds_names, h5_refs))

        if verbose:
            print('Finished writing all channels')

        hdf.close()
        return h5_path
Example #36
    def translate(self, file_path, verbose=False, append_path='', 
                  grp_name='Measurement', parm_encoding='utf-8'):
        """
        Translates the provided file to .h5

        Parameters
        ----------
        file_path : String / unicode
            Absolute path of the .ibw file
        verbose : Boolean (Optional)
            Whether or not to show  print statements for debugging
        append_path : string (Optional)
            h5_file to add these data to, must be a path to the h5_file on disk
        grp_name : string (Optional)
            Change from default "Measurement" name to something specific
        parm_encoding : str, optional
            Codec to be used to decode the bytestrings into Python strings if needed.
            Default 'utf-8'

        Returns
        -------
        h5_path : String / unicode
            Absolute path of the .h5 file
        """
        file_path = path.abspath(file_path)
        # Prepare the .h5 file:
        folder_path, base_name = path.split(file_path)
        base_name = base_name[:-4]
        
        if not append_path:
            h5_path = path.join(folder_path, base_name + '.h5')
            if path.exists(h5_path):
                remove(h5_path)
            h5_file = h5py.File(h5_path, 'w')
        else:
            h5_path = append_path
            if not path.exists(append_path):
                raise Exception('File does not exist. Check pathname.')
            h5_file = h5py.File(h5_path, 'r+')
        

        # Load the ibw file first
        ibw_obj = bw.load(file_path)
        ibw_wave = ibw_obj.get('wave')
        parm_dict = self._read_parms(ibw_wave, parm_encoding)
        chan_labels, chan_units = self._get_chan_labels(ibw_wave, parm_encoding)

        if verbose:
            print('Channels and units found:')
            print(chan_labels)
            print(chan_units)

        # Get the data to figure out if this is an image or a force curve
        images = ibw_wave.get('wData')

        if images.shape[-1] != len(chan_labels):
            chan_labels = chan_labels[1:]  # for layer 0 null set errors in older AR software

        if images.ndim == 3:  # Image stack
            if verbose:
                print('Found image stack of size {}'.format(images.shape))
            type_suffix = 'Image'

            num_rows = parm_dict['ScanLines']
            num_cols = parm_dict['ScanPoints']

            images = images.transpose(2, 1, 0)  # now ordered as [chan, Y, X] image
            images = np.reshape(images, (images.shape[0], -1, 1))  # 3D [chan, Y*X points,1]

            pos_desc = [Dimension('X', 'm', np.linspace(0, parm_dict['FastScanSize'], num_cols)),
                        Dimension('Y', 'm', np.linspace(0, parm_dict['SlowScanSize'], num_rows))]

            spec_desc = Dimension('arb', 'a.u.', [1])

        else:  # single force curve
            if verbose:
                print('Found force curve of size {}'.format(images.shape))

            type_suffix = 'ForceCurve'
            images = np.atleast_3d(images)  # now [Z, chan, 1]
            images = images.transpose((1, 2, 0))  # [chan ,1, Z] force curve

            # The data generated above varies linearly. Override.
            # For now, we'll shove the Z sensor data into the spectroscopic values.

            # Find the channel that corresponds to either Z sensor or Raw:
            try:
                chan_ind = chan_labels.index('ZSnsr')
                spec_data = VALUES_DTYPE(images[chan_ind]).squeeze()
            except ValueError:
                try:
                    chan_ind = chan_labels.index('Raw')
                    spec_data = VALUES_DTYPE(images[chan_ind]).squeeze()
                except ValueError:
                    # We don't expect to come here. If we do, spectroscopic values remains as is
                    spec_data = np.arange(images.shape[2])

            pos_desc = Dimension('X', 'm', [1])
            spec_desc = Dimension('Z', 'm', spec_data)

        # Create measurement group
        meas_grp = create_indexed_group(h5_file, grp_name)

        # Write file and measurement level parameters
        global_parms = generate_dummy_main_parms()
        global_parms['data_type'] = 'IgorIBW_' + type_suffix
        global_parms['translator'] = 'IgorIBW'
        write_simple_attrs(h5_file, global_parms)

        write_simple_attrs(meas_grp, parm_dict)

        # Create Position and spectroscopic datasets
        h5_pos_inds, h5_pos_vals = write_ind_val_dsets(meas_grp, pos_desc, is_spectral=False)
        h5_spec_inds, h5_spec_vals = write_ind_val_dsets(meas_grp, spec_desc, is_spectral=True)

        # Prepare the list of raw_data datasets
        for chan_data, chan_name, chan_unit in zip(images, chan_labels, chan_units):
            if verbose:
                print('channel', chan_name)
                print('unit', chan_unit)
            chan_grp = create_indexed_group(meas_grp, 'Channel')

            write_main_dataset(chan_grp, np.atleast_2d(chan_data), 'Raw_Data',
                               chan_name, chan_unit,
                               None, None,
                               h5_pos_inds=h5_pos_inds, h5_pos_vals=h5_pos_vals,
                               h5_spec_inds=h5_spec_inds, h5_spec_vals=h5_spec_vals,
                               dtype=np.float32)

        if verbose:
            print('Finished preparing raw datasets')

        h5_file.close()
        return h5_path
Example #37
    def read(self, verbose=False, parm_encoding='utf-8'):
        """
        Reads the file given in file_path into a sidpy dataset

        Parameters
        ----------
        verbose : Boolean (Optional)
            Whether or not to show  print statements for debugging
        parm_encoding : str, optional
            Codec to be used to decode the bytestrings into Python strings if
            needed. Default 'utf-8'

        Returns
        -------
        sidpy.Dataset : List of sidpy.Dataset objects.
            Multi-channel inputs are separated into individual dataset objects
        """
        file_path = self._input_file_path

        # Load the ibw file first
        ibw_obj = bw.load(file_path)
        ibw_wave = ibw_obj.get('wave')
        parm_dict = self._read_parms(ibw_wave, parm_encoding)
        chan_labels, chan_units = self._get_chan_labels(ibw_wave, parm_encoding)

        if verbose:
            print('Channels and units found:')
            print(chan_labels)
            print(chan_units)

        # Get the data to figure out if this is an image or a force curve
        images = ibw_wave.get('wData')

        datasets = [] #list of sidpy datasets

        if images.shape[-1] != len(chan_labels):
            chan_labels = chan_labels[1:]  # for layer 0 null set errors in older AR software

        if images.ndim == 3:  # Image stack
            if verbose:
                print('Found image stack of size {}'.format(images.shape))

            num_rows = parm_dict['ScanLines']
            num_cols = parm_dict['ScanPoints']

            for channel in range(images.shape[-1]):
                #Convert it to sidpy dataset object
                data_set = sid.Dataset.from_array(images[:,:,channel], name='Image')
                data_set.data_type = 'Image'

                #Add quantity and units
                data_set.units = chan_units[channel]
                data_set.quantity = chan_labels[channel]

                #Add dimension info
                data_set.set_dimension(0, sid.Dimension(np.linspace(0, parm_dict['FastScanSize'], num_cols),
                                                        name = 'x',
                                                        units=chan_units[channel], quantity = 'x',
                                                        dimension_type='spatial'))
                data_set.set_dimension(1, sid.Dimension(np.linspace(0, parm_dict['SlowScanSize'], num_rows),
                                                        name = 'y',
                                                        units=chan_units[channel], quantity='y',
                                                        dimension_type='spatial'))

                # append metadata
                data_set.metadata = parm_dict
                data_set.data_type = 'image'

                #Finally, append it
                datasets.append(data_set)

        else:  # single force curve
            if verbose:
                print('Found force curve of size {}'.format(images.shape))

            images = np.atleast_3d(images)  # now [Z, chan, 1]

            # Find the channel that corresponds to either Z sensor or Raw:
            try:
                chan_ind = chan_labels.index('ZSnsr')
                spec_data = images[:,chan_ind].squeeze()
            except ValueError:
                try:
                    chan_ind = chan_labels.index('Raw')
                    spec_data = images[:,chan_ind,0].squeeze()
                except ValueError:
                    # We don't expect to come here. If we do, spectroscopic values remains as is
                    spec_data = np.arange(images.shape[2])

            #Go through the channels
            for channel in range(images.shape[-2]):

                # The data generated above varies linearly. Override.
                # For now, we'll shove the Z sensor data into the spectroscopic values.

                #convert to sidpy dataset
                data_set = sid.Dataset.from_array((images[:,channel,0]), name='Force Curve (from Igor)')

                #Set units, quantity
                data_set.units = chan_units[channel]
                data_set.quantity = chan_labels[channel]

                data_set.set_dimension(0, sid.Dimension('Z', spec_data,
                                                        units=chan_units[0], quantity=chan_labels[0],
                                                        dimension_type='spectral'))

                #append metadata
                data_set.data_type = 'line_plot'
                data_set.metadata = parm_dict

                #Add dataset to list
                datasets.append(data_set)

        # Return the dataset
        return datasets
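Hypothetical usage of the reader above, assuming a SciFiReaders-style reader class (the class name IgorIBWReader and the file name are placeholders):

reader = IgorIBWReader('scan.ibw')
datasets = reader.read(verbose=True)
for data_set in datasets:
    print(data_set.quantity, data_set.units)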
Example #38
def run(args):
    wave = load(args.infile)
    numpy.savetxt(args.outfile, wave['wave']['wData'], fmt='%g', delimiter='\t')
    if args.verbose > 0:
        wave['wave'].pop('wData')
        pprint.pprint(wave)