def get_FCMdata(self, auto_comp=False, **kwargs):
    """Return the next FCM data set stored in a FCS file.

    Parses the header, text, analysis, and data segments found at the
    reader's current offset, builds an FCMdata object, optionally
    compensates it with a spillover matrix, and applies the reader's
    configured transform (``'logicle'`` or ``'log'``).

    Parameters
    ----------
    auto_comp : bool
        When True, compensate using the spillover matrix embedded in the
        file (forced True when the reader was constructed with explicit
        ``sidx``/``spill`` values).
    **kwargs
        Logicle tuning parameters, consulted only when
        ``self.transform == 'logicle'``: ``r``, ``T`` (default 262144),
        ``m`` (4.5), ``w`` (0.5), ``a`` (0), ``scale_max`` (1e5),
        ``scale_min`` (0), ``rquant`` (None).

    Returns
    -------
    FCMdata
        The parsed (and possibly compensated/transformed) data set.
    """
    # parse headers
    header = self.parse_header(self.cur_offset)
    # parse text segment (keyword/value annotations)
    text = self.parse_text(self.cur_offset, header['text_start'],
                           header['text_stop'])
    # parse analysis segment; TEXT keywords take precedence over the
    # offsets recorded in the header.
    # NOTE(review): unlike begindata/enddata below, these offsets are not
    # passed through int() -- presumably parse_analysis accepts strings;
    # confirm against its implementation.
    try:
        astart = text['beginanalysis']
    except KeyError:
        astart = header['analysis_start']
    try:
        astop = text['endanalysis']
    except KeyError:
        astop = header['analysis_end']
    analysis = self.parse_analysis(self.cur_offset, astart, astop)
    # parse data segment, again preferring the TEXT keywords
    try:
        dstart = int(text['begindata'])
    except KeyError:
        dstart = header['data_start']
    try:
        dstop = int(text['enddata'])
    except KeyError:
        dstop = header['data_end']

    # account for LMD reporting the wrong values for the size of the data
    # segment
    lmd = self.fix_lmd(self.cur_offset, header['text_start'],
                       header['text_stop'])
    dstop = dstop + lmd
    data = self.parse_data(self.cur_offset, dstart, dstop, text)

    # build the channel bookkeeping lists
    channels = []            # display names (PnS if set, else PnN)
    scchannels = []          # scatter (non-fluorescence) channel names
    scchannel_indexes = []   # 0-based indexes of scatter channels, sans Time
    to_transform = []        # 0-based indexes of FL channels to transform
    base_chan_name = []      # raw PnN names
    for i in range(1, int(text['par']) + 1):
        base_chan_name.append(text['p%dn' % i])
        # prefer the stain name (PnS) when present and non-blank
        try:
            if text['p%ds' % i] not in ['', ' ']:
                name = text['p%ds' % i]
            else:
                name = text['p%dn' % i]
        except KeyError:
            name = text['p%dn' % i]
        channels.append(name)
        if not is_fl_channel(name):
            scchannels.append(name)
            if name != 'Time':
                scchannel_indexes.append(i - 1)
        else:
            # we're a FL channel: only mark full-range channels (PnR of
            # 262144) for transformation
            try:
                if text['p%dr' % i] == '262144':
                    to_transform.append(i - 1)
            except KeyError:
                pass

    # derive a dataset name from the file name, extension stripped;
    # in-memory file objects have no .name attribute
    try:
        unused_path, name = os.path.split(self._fh.name)
    except AttributeError:
        name = 'InMemoryFile'
    name, unused_ext = os.path.splitext(name)
    tmpfcm = FCMdata(name, data, zip(base_chan_name, channels), scchannels,
                     Annotation({'text': text,
                                 'header': header,
                                 'analysis': analysis,
                                 }))

    if self.sidx is not None and self.spill is not None:
        # if we're passed a spillover we assume we compensate
        auto_comp = True
    if auto_comp:
        if self.sidx is None and self.spill is None:
            # no explicit matrix: use the one embedded in the file, if any
            if tmpfcm.get_spill():
                spill, sidx = get_spill(tmpfcm.get_spill())
                tmpfcm.compensate(sidx=sidx, spill=spill)
        else:
            tmpfcm.compensate(sidx=self.sidx, spill=self.spill)

    if self.transform == 'logicle':
        try:
            if isinstance(kwargs['r'], Number):
                self.r = kwargs['r']
            elif numpy.all(numpy.isreal(kwargs['r'])):
                self.r = numpy.zeros(data.shape[1])
            else:
                # BUGFIX: previously self.r was left unset (or stale) when
                # 'r' was passed but matched neither branch above, making
                # the tmpfcm.logicle(..., r=self.r) call below raise
                # AttributeError on a fresh reader.
                self.r = None
        except KeyError:
            self.r = None
        # logicle tuning parameters with their defaults
        T = kwargs.get('T', 262144)
        m = kwargs.get('m', 4.5)
        scale_max = kwargs.get('scale_max', 1e5)
        scale_min = kwargs.get('scale_min', 0)
        rquant = kwargs.get('rquant', None)
        w = kwargs.get('w', 0.5)
        a = kwargs.get('a', 0)
        if to_transform:
            tmpfcm.logicle(to_transform, T=T, m=m, r=self.r, w=w, a=a,
                           scale_max=scale_max, scale_min=scale_min,
                           rquant=rquant)
    elif self.transform == 'log':
        if to_transform:
            tmpfcm.log(to_transform)

    # record the transform parameters on the result when the reader has them
    try:
        tmpfcm._r = self.r
    except AttributeError:
        pass
    try:
        tmpfcm._w = self.w
    except AttributeError:
        pass

    # advance to the next data set if the file chains one
    if 'nextdata' in text:
        self.cur_offset = int(text['nextdata'])
    return tmpfcm
def get_FCMdata(self, auto_comp=False, **kwargs):
    """Return the next FCM data set stored in a FCS file.

    Reads the header, text, analysis, and data segments at the current
    offset, constructs an FCMdata object, optionally applies spillover
    compensation, and runs the reader's configured transform
    (``'logicle'`` or ``'log'``) on the fluorescence channels.

    Parameters
    ----------
    auto_comp : bool
        Compensate with the file's embedded spillover matrix when True
        (forced True when the reader carries explicit ``sidx``/``spill``).
    **kwargs
        Logicle tuning, used only when ``self.transform == 'logicle'``:
        ``r``, ``T`` (default 262144), ``m`` (4.5), ``w`` (0.5), ``a`` (0),
        ``scale_max`` (1e5), ``scale_min`` (0), ``rquant`` (None).

    Returns
    -------
    FCMdata
        The parsed (and possibly compensated/transformed) data set.
    """
    # parse headers
    header = self.parse_header(self.cur_offset)
    # parse text segment (keyword/value annotations)
    text = self.parse_text(
        self.cur_offset, header['text_start'], header['text_stop'])
    # parse analysis segment, TEXT keywords overriding the header offsets.
    # NOTE(review): these are not int()-converted like begindata/enddata;
    # presumably parse_analysis tolerates strings -- confirm.
    try:
        astart = text['beginanalysis']
    except KeyError:
        astart = header['analysis_start']
    try:
        astop = text['endanalysis']
    except KeyError:
        astop = header['analysis_end']
    analysis = self.parse_analysis(self.cur_offset, astart, astop)
    # parse data segment, again preferring TEXT keywords
    try:
        dstart = int(text['begindata'])
    except KeyError:
        dstart = header['data_start']
    try:
        dstop = int(text['enddata'])
    except KeyError:
        dstop = header['data_end']

    # account for LMD reporting the wrong values for the size of the data
    # segment
    lmd = self.fix_lmd(
        self.cur_offset, header['text_start'], header['text_stop'])
    dstop = dstop + lmd
    data = self.parse_data(self.cur_offset, dstart, dstop, text)

    # build fcmdata object: channel bookkeeping
    channels = []            # display names (PnS if set, else PnN)
    scchannels = []          # scatter (non-fluorescence) channel names
    scchannel_indexes = []   # 0-based scatter channel indexes, minus Time
    to_transform = []        # 0-based FL channel indexes to transform
    base_chan_name = []      # raw PnN names
    for i in range(1, int(text['par']) + 1):
        base_chan_name.append(text['p%dn' % i])
        # prefer the stain name (PnS) when present and non-blank
        try:
            if text['p%ds' % i] not in ['', ' ']:
                name = text['p%ds' % i]
            else:
                name = text['p%dn' % i]
        except KeyError:
            name = text['p%dn' % i]
        channels.append(name)
        if not is_fl_channel(name):
            scchannels.append(name)
            if name != 'Time':
                scchannel_indexes.append(i - 1)
        else:
            # we're a FL channel: only full-range (PnR == 262144)
            # channels get transformed
            try:
                if text['p%dr' % i] == '262144':
                    to_transform.append(i - 1)
            except KeyError:
                pass

    # dataset name comes from the file name (extension stripped);
    # in-memory file objects lack a .name attribute
    try:
        unused_path, name = os.path.split(self._fh.name)
    except AttributeError:
        name = 'InMemoryFile'
    name, unused_ext = os.path.splitext(name)
    tmpfcm = FCMdata(name, data, zip(base_chan_name, channels),
                     scchannels,
                     Annotation({'text': text,
                                 'header': header,
                                 'analysis': analysis,
                                 }))

    if self.sidx is not None and self.spill is not None:
        # if we're passed a spillover we assume we compensate
        auto_comp = True
    if auto_comp:
        if self.sidx is None and self.spill is None:
            # no explicit matrix supplied: fall back to the one embedded
            # in the file, if present
            if tmpfcm.get_spill():
                spill, sidx = get_spill(tmpfcm.get_spill())
                tmpfcm.compensate(sidx=sidx, spill=spill)
        else:
            tmpfcm.compensate(sidx=self.sidx, spill=self.spill)

    if self.transform == 'logicle':
        try:
            if isinstance(kwargs['r'], Number):
                self.r = kwargs['r']
            elif numpy.all(numpy.isreal(kwargs['r'])):
                self.r = numpy.zeros(data.shape[1])
            else:
                # BUGFIX: self.r was previously left unset (or stale) when
                # 'r' was supplied but matched neither branch, so the
                # tmpfcm.logicle(..., r=self.r) call below could raise
                # AttributeError on a fresh reader.
                self.r = None
        except KeyError:
            self.r = None
        # logicle tuning parameters with their defaults
        T = kwargs.get('T', 262144)
        m = kwargs.get('m', 4.5)
        scale_max = kwargs.get('scale_max', 1e5)
        scale_min = kwargs.get('scale_min', 0)
        rquant = kwargs.get('rquant', None)
        w = kwargs.get('w', 0.5)
        a = kwargs.get('a', 0)
        if to_transform:
            tmpfcm.logicle(
                to_transform, T=T, m=m, r=self.r, w=w, a=a,
                scale_max=scale_max, scale_min=scale_min, rquant=rquant)
    elif self.transform == 'log':
        if to_transform:
            tmpfcm.log(to_transform)

    # stash the transform parameters on the result when the reader has them
    try:
        tmpfcm._r = self.r
    except AttributeError:
        pass
    try:
        tmpfcm._w = self.w
    except AttributeError:
        pass

    # advance to the next chained data set, if any
    if 'nextdata' in text:
        self.cur_offset = int(text['nextdata'])
    return tmpfcm