Code example #1
class TimedSpecies(ModuleBase):
    inputName = Input('filtered')
    outputName = Output('timedSpecies')
    Species_1_Name = CStr('Species1')
    Species_1_Start = Float(0)
    Species_1_Stop = Float(1e6)

    Species_2_Name = CStr('')
    Species_2_Start = Float(0)
    Species_2_Stop = Float(0)

    Species_3_Name = CStr('')
    Species_3_Start = Float(0)
    Species_3_Stop = Float(0)

    def execute(self, namespace):
        inp = namespace[self.inputName]
        mapped = tabular.mappingFilter(inp)
        timedSpecies = self.populateTimedSpecies()

        mapped.addColumn('ColourNorm', np.ones_like(mapped['t'], 'float'))
        for species in timedSpecies:
            mapped.addColumn('p_%s' % species['name'],
                             (mapped['t'] >= species['t_start']) *
                             (mapped['t'] < species['t_end']))

        if 'mdh' in dir(inp):
            mapped.mdh = inp.mdh
            mapped.mdh['TimedSpecies'] = timedSpecies

        namespace[self.outputName] = mapped

    def populateTimedSpecies(self):
        ts = []
        if self.Species_1_Name:
            ts.append({
                'name': self.Species_1_Name,
                't_start': self.Species_1_Start,
                't_end': self.Species_1_Stop
            })

        if self.Species_2_Name:
            ts.append({
                'name': self.Species_2_Name,
                't_start': self.Species_2_Start,
                't_end': self.Species_2_Stop
            })

        if self.Species_3_Name:
            ts.append({
                'name': self.Species_3_Name,
                't_start': self.Species_3_Start,
                't_end': self.Species_3_Stop
            })

        return ts
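
A minimal usage sketch for the module above. This is hypothetical: it assumes recipe modules can be constructed standalone with trait keyword arguments, and that 'filtered_points' is an existing PYME tabular datasource with a 't' column.

namespace = {'filtered': filtered_points}
mod = TimedSpecies(inputName='filtered', outputName='timedSpecies',
                   Species_1_Name='A647', Species_1_Start=0.0,
                   Species_1_Stop=5000.0)
mod.execute(namespace)
timed = namespace['timedSpecies']  # gains 'ColourNorm' and 'p_A647' columns
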
Code example #2
class ClusterStats(ModuleBase):

    inputName = Input('with_clumps')
    IDkey = CStr('clumpIndex')
    StatMethod = Enum(['std', 'min', 'max', 'mean', 'median', 'count', 'sum'])
    StatKey = CStr('x')
    outputName = Output('withClumpStats')

    def execute(self, namespace):
        from scipy.stats import binned_statistic

        inp = namespace[self.inputName]
        mapped = tabular.mappingFilter(inp)

        ids = inp[self.IDkey]  # I imagine this needs to be an int type key
        prop = inp[self.StatKey]
        maxid = int(ids.max())
        edges = -0.5 + np.arange(maxid + 2)
        resstat = binned_statistic(ids,
                                   prop,
                                   statistic=self.StatMethod,
                                   bins=edges)

        mapped.addColumn(self.StatKey + "_" + self.StatMethod, resstat[0][ids])

        # propagate metadata, if present
        try:
            mapped.mdh = inp.mdh
        except AttributeError:
            pass

        namespace[self.outputName] = mapped

    @property
    def _key_choices(self):
        #try and find the available column names
        try:
            return sorted(self._parent.namespace[self.inputName].keys())
        except:
            return []

    @property
    def default_view(self):
        from traitsui.api import View, Group, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('inputName',
                         editor=CBEditor(choices=self._namespace_keys)),
                    Item('_'),
                    Item('IDkey', editor=CBEditor(choices=self._key_choices)),
                    Item('StatKey',
                         editor=CBEditor(choices=self._key_choices)),
                    Item('StatMethod'),
                    Item('_'),
                    Item('outputName'),
                    buttons=['OK'])
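
The per-cluster statistic is broadcast back onto every row by indexing the binned result with the original id array (resstat[0][ids]). A self-contained numpy/scipy illustration of that trick, with made-up values and no PYME dependencies:

import numpy as np
from scipy.stats import binned_statistic

ids = np.array([1, 1, 2, 2, 2, 3])            # cluster label per point
x = np.array([1.0, 3.0, 2.0, 4.0, 6.0, 5.0])  # property to summarise

edges = -0.5 + np.arange(int(ids.max()) + 2)  # one unit-wide bin per integer id
stat, _, _ = binned_statistic(ids, x, statistic='mean', bins=edges)
per_point = stat[ids]                         # array([2., 2., 4., 4., 4., 5.])
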
Code example #3
class HistByID(ModuleBase):
    """Plot histogram of a column by ID"""
    inputName = Input('measurements')
    IDkey = CStr('objectID')
    histkey = CStr('qIndex')
    outputName = Output('outGraph')
    nbins = Int(50)
    minval = Float(float('nan'))
    maxval = Float(float('nan'))

    def execute(self, namespace):
        import math
        meas = namespace[self.inputName]
        ids = meas[self.IDkey]

        uid, valsu = uniqueByID(ids, meas[self.histkey])
        if math.isnan(self.minval):
            minv = valsu.min()
        else:
            minv = self.minval
        if math.isnan(self.maxval):
            maxv = valsu.max()
        else:
            maxv = self.maxval

        import matplotlib.pyplot as plt
        plt.figure()
        plt.hist(valsu, self.nbins, range=(minv, maxv))
        plt.xlabel(self.histkey)

    @property
    def _key_choices(self):
        #try and find the available column names
        try:
            return sorted(self._parent.namespace[self.inputName].keys())
        except:
            return []

    @property
    def default_view(self):
        from traitsui.api import View, Group, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('inputName',
                         editor=CBEditor(choices=self._namespace_keys)),
                    Item('_'),
                    Item('IDkey', editor=CBEditor(choices=self._key_choices)),
                    Item('histkey',
                         editor=CBEditor(choices=self._key_choices)),
                    Item('nbins'),
                    Item('minval'),
                    Item('maxval'),
                    Item('_'),
                    Item('outputName'),
                    buttons=['OK'])
Code example #4
class ValidClumps(ModuleBase):

    inputName = Input('with_clumps')
    inputValid = Input('valid_clumps')
    IDkey = CStr('clumpIndex')
    outputName = Output('with_validClumps')

    def execute(self, namespace):

        inp = namespace[self.inputName]
        valid = namespace[self.inputValid]
        mapped = tabular.mappingFilter(inp)

        # note: in coalesced data the clumpIndices are float!
        # comparisons go wrong unless these are converted to int first,
        # which is why the rint and astype conversions are used below
        ids = np.rint(inp[self.IDkey]).astype('i')
        validIDs = np.in1d(ids,
                           np.unique(np.rint(valid[self.IDkey]).astype('i')))

        mapped.addColumn('validID',
                         validIDs.astype('f'))  # should be float or int?

        # propagate metadata, if present
        try:
            mapped.mdh = inp.mdh
        except AttributeError:
            pass

        namespace[self.outputName] = mapped
Code example #5
class ClusterTimeRange(ModuleBase):

    inputName = Input('dbscanClustered')
    IDkey = CStr('dbscanClumpID')
    outputName = Output('withTrange')

    def execute(self, namespace):
        from scipy.stats import binned_statistic

        inp = namespace[self.inputName]
        mapped = tabular.mappingFilter(inp)

        ids = inp[self.IDkey]
        t = inp['t']
        maxid = int(ids.max())
        edges = -0.5 + np.arange(maxid + 2)
        resmin = binned_statistic(ids, t, statistic='min', bins=edges)
        resmax = binned_statistic(ids, t, statistic='max', bins=edges)
        trange = resmax[0][ids] - resmin[0][ids] + 1

        mapped.addColumn('trange', trange)

        # propagate metadata, if present
        try:
            mapped.mdh = inp.mdh
        except AttributeError:
            pass

        namespace[self.outputName] = mapped
Code example #6
class QindexRatio(ModuleBase):
    inputName = Input('qindex')
    outputName = Output('qindex-calibrated')
    qIndexDenom = CStr('qIndex1')
    qIndexNumer = CStr('qIndex2')
    qIndexRatio = CStr('qRatio')

    def execute(self, namespace):
        inp = namespace[self.inputName]
        mapped = tabular.mappingFilter(inp)
        qkey1 = self.qIndexDenom
        qkey2 = self.qIndexNumer

        v2 = inp[qkey2]
        ratio = np.zeros_like(v2, dtype='float64')
        qigood = v2 > 0
        ratio[qigood] = inp[qkey1][qigood] / v2[qigood]

        mapped.addColumn(self.qIndexRatio, ratio)
        namespace[self.outputName] = mapped

    @property
    def _key_choices(self):
        #try and find the available column names
        try:
            return sorted(self._parent.namespace[self.inputName].keys())
        except:
            return []

    @property
    def default_view(self):
        from traitsui.api import View, Group, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('inputName',
                         editor=CBEditor(choices=self._namespace_keys)),
                    Item('_'),
                    Item('qIndexDenom',
                         editor=CBEditor(choices=self._key_choices)),
                    Item('qIndexNumer',
                         editor=CBEditor(choices=self._key_choices)),
                    Item('qIndexRatio'),
                    Item('_'),
                    Item('outputName'),
                    buttons=['OK'])
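
The masked assignment above avoids dividing by zero; the same pattern in isolation (plain numpy, made-up values):

import numpy as np

num = np.array([2.0, 4.0, 6.0])
den = np.array([1.0, 0.0, 3.0])

ratio = np.zeros_like(den, dtype='float64')
good = den > 0
ratio[good] = num[good] / den[good]  # array([2., 0., 2.]) - stays 0 where den == 0
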
Code example #7
class ScatterbyID(ModuleBase):
    """Take just certain columns of a variable"""
    inputName = Input('measurements')
    IDkey = CStr('objectID')
    xkey = CStr('qIndex')
    ykey = CStr('objArea')
    outputName = Output('outGraph')

    def execute(self, namespace):
        meas = namespace[self.inputName]
        ids = meas[self.IDkey]
        uid, x = uniqueByID(ids, meas[self.xkey])
        uid, y = uniqueByID(ids, meas[self.ykey])

        import pylab
        pylab.figure()
        pylab.scatter(x, y)

        pylab.grid()
        pylab.xlabel(self.xkey)
        pylab.ylabel(self.ykey)
        #namespace[self.outputName] = out

    @property
    def _key_choices(self):
        #try and find the available column names
        try:
            return sorted(self._parent.namespace[self.inputName].keys())
        except:
            return []

    @property
    def default_view(self):
        from traitsui.api import View, Group, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('inputName',
                         editor=CBEditor(choices=self._namespace_keys)),
                    Item('_'),
                    Item('IDkey', editor=CBEditor(choices=self._key_choices)),
                    Item('xkey', editor=CBEditor(choices=self._key_choices)),
                    Item('ykey', editor=CBEditor(choices=self._key_choices)),
                    Item('_'),
                    Item('outputName'),
                    buttons=['OK'])
Code example #8
class RuleChain(HasTraits):
    post_on = Enum(POST_CHOICES)
    protocol = CStr('')

    def __init__(self, rule_factories=None, *args, **kwargs):
        if rule_factories is None:
            rule_factories = list()
        self.rule_factories = rule_factories
        HasTraits.__init__(self, *args, **kwargs)
Code example #9
File: base.py Project: b3nroll1ns/python-microscopy
class OutputModule(ModuleBase):
    """
    Output modules are the one exception to the recipe-module functional (no side effects) programming
    paradigm and are used to perform IO and save or display designated outputs/endpoints from the recipe.
    
    As such, they should act solely as a sink, and should not do any processing or write anything back
    to the namespace.
    """
    filePattern = CStr('{output_dir}/{file_stub}.csv')
    scheme = Enum('File', 'pyme-cluster://', 'pyme-cluster:// - aggregate')

    def _schemafy_filename(self, out_filename):
        if self.scheme == 'File':
            return out_filename
        elif self.scheme == 'pyme-cluster://':
            from PYME.IO import clusterIO
            import os
            return os.path.join(clusterIO.local_dataroot,
                                out_filename.lstrip('/'))
        elif self.scheme == 'pyme-cluster:// - aggregate':
            raise RuntimeError('Aggregation not supported')

    def _check_outputs(self):
        """
        This function exists to help with debugging when writing a new recipe module.

        Overridden here for the special case of IO modules derived from OutputModule: these are permitted to
        have side effects, but not classical outputs to the namespace, and will execute (when appropriate)
        regardless.
        """
        if len(self.outputs) != 0:
            raise RuntimeError(
                'Output modules should not write anything to the namespace')

    def generate(self, namespace, recipe_context={}):
        """
        Function to be called from within dh5view (rather than batch processing). Some outputs are ignored, in which
        case this function returns None.
        
        Parameters
        ----------
        namespace

        Returns
        -------

        """
        return None

    def execute(self, namespace):
        """
        Output modules by definition do nothing when executed - they act as a sink and implement a save method instead.

        """
        pass
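
A sketch of how a concrete OutputModule subclass might resolve its output path: format filePattern with the recipe context (as ImageUpload does further below), then pass the result through the scheme handling shown above. Hypothetical - it assumes the module can be instantiated standalone with trait keyword arguments, and the context values are made up.

out = OutputModule(filePattern='{output_dir}/{file_stub}.csv', scheme='File')
context = {'output_dir': '/tmp/results', 'file_stub': 'run001'}

out_filename = out.filePattern.format(**context)     # '/tmp/results/run001.csv'
out_filename = out._schemafy_filename(out_filename)  # unchanged for scheme='File'
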
Code example #10
class FitProfiles(ModuleBase):
    inputName = Input('profiles')

    fit_type = CStr(list(profile_fitters.non_ensemble_fitters.keys())[0])

    outputName = Output('fit_results')

    def execute(self, namespace):

        inp = namespace[self.inputName]

        # generate LineProfileHandler from tables
        handler = LineProfileHandler()
        handler._load_profiles_from_list(inp)

        fit_class = profile_fitters.non_ensemble_fitters[self.fit_type]
        self.fitter = fit_class(handler)

        self.fitter.fit_profiles()

        res = tabular.RecArraySource(self.fitter.results)

        # propagate metadata, if present
        res.mdh = MetaDataHandler.NestedClassMDHandler(
            getattr(inp, 'mdh', None))

        res.mdh['FitProfiles.FitType'] = self.fit_type

        namespace[self.outputName] = res

    @property
    def _fitter_choices(self):
        return list(profile_fitters.non_ensemble_fitters.keys())

    @property
    def default_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('inputName',
                         editor=CBEditor(choices=self._namespace_keys)),
                    Item('_'),
                    Item('fit_type',
                         editor=CBEditor(choices=self._fitter_choices)),
                    Item('_'),
                    Item('outputName'),
                    buttons=['OK'])

    @property
    def pipeline_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(
            Item('fit_type', editor=CBEditor(choices=self._fitter_choices)), )
Code example #11
class FlatAndDarkCorrect(ModuleBase):
    input_image = Input('input')
    flatfield_filename = CStr('')
    darkmap_filename = CStr('')
    output_name = Output('corrected')

    def execute(self, namespace):
        from quant_condensate import FlatfieldDarkCorrectedDataSource
        from PYME.IO.image import ImageStack
        image = namespace[self.input_image]

        if self.flatfield_filename == '':
            flat = None
        else:
            flat = ImageStack(
                filename=self.flatfield_filename).data[:, :, 0].squeeze()

        if not self.darkmap_filename == '':
            dark = ImageStack(
                filename=self.darkmap_filename).data[:, :, 0].squeeze()
        else:
            dark = None

        ffd = FlatfieldDarkCorrectedDataSource.DataSource(image.data,
                                                          image.mdh,
                                                          flatfield=flat,
                                                          dark=dark)

        im = ImageStack(ffd, titleStub=self.output_name)
        im.mdh.copyEntriesFrom(image.mdh)
        im.mdh['Parent'] = image.filename
        if self.darkmap_filename:
            im.mdh['FlatAndDarkCorrect.Darkmap'] = self.darkmap_filename
        if self.flatfield_filename:
            im.mdh['FlatAndDarkCorrect.Flatmap'] = self.flatfield_filename
        namespace[self.output_name] = im
Code example #12
File: base.py Project: csoeller/PYME-extra
class ExtractChannelByName(ModuleBase):
    """Extract one channel from an image using regular expression matching to image channel names - by default this is case insensitive"""
    inputName = Input('input')
    outputName = Output('filtered_image')

    channelNamePattern = CStr('channel0')
    caseInsensitive = Bool(True)

    def _matchChannels(self, channelNames):
        # we put this into its own function so that we can call it externally for testing
        import re
        flags = 0
        if self.caseInsensitive:
            flags |= re.I
        idxs = [
            i for i, c in enumerate(channelNames)
            if re.search(self.channelNamePattern, c, flags)
        ]
        return idxs

    def _pickChannel(self, image):
        channelNames = image.mdh['ChannelNames']
        idxs = self._matchChannels(channelNames)
        if len(idxs) < 1:
            raise RuntimeError(
                "Expression '%s' did not match any channel names" %
                self.channelNamePattern)
        if len(idxs) > 1:
            raise RuntimeError(
                ("Expression '%s' did match more than one channel name: " %
                 self.channelNamePattern) +
                ', '.join([channelNames[i] for i in idxs]))
        idx = idxs[0]

        chan = image.data[:, :, :, idx]

        im = ImageStack(chan, titleStub='Filtered Image')
        im.mdh.copyEntriesFrom(image.mdh)
        im.mdh['ChannelNames'] = [channelNames[idx]]
        im.mdh['Parent'] = image.filename

        return im

    def execute(self, namespace):
        namespace[self.outputName] = self._pickChannel(
            namespace[self.inputName])
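
The comment in _matchChannels notes that it was factored out so the matching logic can be exercised externally. A quick check along those lines, with hypothetical channel names (it assumes the module can be constructed with trait keyword arguments):

ecn = ExtractChannelByName(channelNamePattern='GFP', caseInsensitive=True)
ecn._matchChannels(['DAPI', 'gfp-venus', 'mCherry'])       # -> [1]

ecn.channelNamePattern = '^channel1$'
ecn._matchChannels(['channel0', 'channel1', 'channel10'])  # -> [1]
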
Code example #13
class PointFeatureBase(ModuleBase):
    """
    common base class for feature extraction routines - implements normalisation and PCA routines
    """

    outputColumnName = CStr('features')
    columnForEachFeature = Bool(
        False
    )  #if true, outputs a column for each feature - useful for visualising

    normalise = Bool(True)  #subtract mean and divide by std. deviation

    PCA = Bool(
        True
    )  # reduce feature dimensionality by performing PCA - TODO - should this be a separate module and be chained instead?
    PCA_components = Int(3)  # 0 = same dimensionality as features

    def _process_features(self, data, features):
        from PYME.IO import tabular
        out = tabular.MappingFilter(data)
        out.mdh = getattr(data, 'mdh', None)

        if self.normalise:
            features = features - features.mean(0)[None, :]
            features = features / features.std(0)[None, :]

        if self.PCA:
            from sklearn.decomposition import PCA

            pca = PCA(n_components=(
                self.PCA_components if self.PCA_components > 0 else None
            )).fit(features)
            features = pca.transform(features)

            out.pca = pca  #save the pca object just in case we want to look at what the principal components are (this is hacky)

        out.addColumn(self.outputColumnName, features)

        if self.columnForEachFeature:
            for i in range(features.shape[1]):
                out.addColumn('feat_%d' % i, features[:, i])

        return out
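
A standalone sketch of the normalisation and PCA steps in _process_features, using a made-up feature matrix and only numpy plus scikit-learn:

import numpy as np
from sklearn.decomposition import PCA

features = np.random.rand(500, 10)  # n_points x n_features (made up)

# z-score each feature column (the 'normalise' branch)
features = (features - features.mean(0)[None, :]) / features.std(0)[None, :]

# project onto 3 principal components (the 'PCA' branch with PCA_components=3)
pca = PCA(n_components=3).fit(features)
reduced = pca.transform(features)   # shape (500, 3)
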
Code example #14
class QindexScale(ModuleBase):
    inputName = Input('qindex')
    outputName = Output('qindex-calibrated')
    qIndexkey = CStr('qIndex')
    qindexValue = Float(1.0)
    NEquivalent = Float(1.0)

    def execute(self, namespace):
        inp = namespace[self.inputName]
        mapped = tabular.mappingFilter(inp)
        qkey = self.qIndexkey
        scaled = inp[qkey]
        qigood = inp[qkey] > 0
        scaled[qigood] = inp[qkey][qigood] * self.NEquivalent / self.qindexValue

        self.newKey = '%sCal' % qkey
        mapped.addColumn(self.newKey, scaled)
        namespace[self.outputName] = mapped

    @property
    def _key_choices(self):
        #try and find the available column names
        try:
            return sorted(self._parent.namespace[self.inputName].keys())
        except:
            return []

    @property
    def default_view(self):
        from traitsui.api import View, Group, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('inputName',
                         editor=CBEditor(choices=self._namespace_keys)),
                    Item('_'),
                    Item('qIndexkey',
                         editor=CBEditor(choices=self._key_choices)),
                    Item('qindexValue'),
                    Item('NEquivalent'),
                    Item('_'),
                    Item('outputName'),
                    buttons=['OK'])
Code example #15
class MergeClumpsTperiod(ModuleBase):
    """
    Create a new mapping object which derives mapped keys from original ones.
    Also adds the time period of bursts by adding tmin and tmax columns for each clump.
    """
    inputName = Input('clumped')
    outputName = Output('merged')
    labelKey = CStr('clumpIndex')

    def execute(self, namespace):
        from PYME.Analysis.points.DeClump import pyDeClump
        from PYME.Analysis.points.DeClump import deClump as deClumpC

        inp = namespace[self.inputName]

        grouped = pyDeClump.mergeClumps(inp, labelKey=self.labelKey)

        # work out tmin and tmax
        I = np.argsort(inp[self.labelKey])
        sorted_src = {k: inp[k][I] for k in [self.labelKey, 't']}
        # tmin and tmax - tentative addition
        NClumps = int(np.max(sorted_src[self.labelKey]) + 1)
        tmin = deClumpC.aggregateMin(NClumps,
                                     sorted_src[self.labelKey].astype('i'),
                                     sorted_src['t'].astype('f'))
        tmax = -deClumpC.aggregateMin(NClumps,
                                      sorted_src[self.labelKey].astype('i'),
                                      -1.0 * sorted_src['t'].astype('f'))
        grouped.addColumn('tmin', tmin)
        grouped.addColumn('tmax', tmax)

        try:
            grouped.mdh = inp.mdh
        except AttributeError:
            pass

        namespace[self.outputName] = grouped
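
The tmax computation above reuses aggregateMin by negating t: a maximum is just the negated minimum of the negated values. The identity in plain numpy:

import numpy as np

t = np.array([3.0, 7.0, 5.0])
assert -np.min(-t) == np.max(t)  # both are 7.0
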
Code example #16
class EnsembleFitProfiles(ModuleBase):
    inputName = Input('line_profiles')

    fit_type = CStr(list(profile_fitters.ensemble_fitters.keys())[0])
    ensemble_parameter_guess = Float(50.)
    hold_ensemble_parameter_constant = Bool(False)

    outputName = Output('fit_results')

    def execute(self, namespace):

        inp = namespace[self.inputName]

        # generate LineProfileHandler from tables
        handler = LineProfileHandler()
        handler._load_profiles_from_list(inp)

        fit_class = profile_fitters.ensemble_fitters[self.fit_type]
        self.fitter = fit_class(handler)

        if self.hold_ensemble_parameter_constant:
            self.fitter.fit_profiles(self.ensemble_parameter_guess)
        else:
            self.fitter.ensemble_fit(self.ensemble_parameter_guess)

        res = tabular.RecArraySource(self.fitter.results)

        # propagate metadata, if present
        res.mdh = MetaDataHandler.NestedClassMDHandler(
            getattr(inp, 'mdh', None))

        res.mdh['EnsembleFitProfiles.FitType'] = self.fit_type
        res.mdh[
            'EnsembleFitProfiles.EnsembleParameterGuess'] = self.ensemble_parameter_guess
        res.mdh[
            'EnsembleFitProfiles.HoldEnsembleParamConstant'] = self.hold_ensemble_parameter_constant

        namespace[self.outputName] = res

    @property
    def _fitter_choices(self):
        return list(profile_fitters.ensemble_fitters.keys())

    @property
    def default_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('inputName',
                         editor=CBEditor(choices=self._namespace_keys)),
                    Item('_'),
                    Item('fit_type',
                         editor=CBEditor(choices=self._fitter_choices)),
                    Item('_'),
                    Item('ensemble_parameter_guess'),
                    Item('_'),
                    Item('hold_ensemble_parameter_constant'),
                    Item('_'),
                    Item('outputName'),
                    buttons=['OK'])

    @property
    def pipeline_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(
            Item('fit_type', editor=CBEditor(choices=self._fitter_choices)),
            Item('_'),
            Item('ensemble_parameter_guess'),
            Item('_'),
            Item('hold_ensemble_parameter_constant'),
        )

    @property
    def dsview_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('fit_type',
                         editor=CBEditor(choices=self._fitter_choices)),
                    Item('_'),
                    Item('ensemble_parameter_guess'),
                    Item('_'),
                    Item('hold_ensemble_parameter_constant'),
                    buttons=['OK'])
Code example #17
class EnsembleFitROIs(
        ModuleBase):  # Note that this should probably be moved somewhere else
    inputName = Input('ROIs')

    fit_type = CStr('LorentzianConvolvedSolidSphere_ensemblePSF')
    ensemble_parameter_guess = Float(50.)
    hold_ensemble_parameter_constant = Bool(False)

    outputName = Output('fit_results')

    def execute(self, namespace):

        inp = namespace[self.inputName]

        # generate RegionHandler from tables
        handler = RegionHandler()
        handler._load_from_list(inp)

        fit_class = region_fitters.fitters[self.fit_type]
        fitter = fit_class(handler)

        if self.hold_ensemble_parameter_constant:
            fitter.fit_profiles(self.ensemble_parameter_guess)
        else:
            fitter.ensemble_fit(self.ensemble_parameter_guess)

        res = tabular.RecArraySource(fitter.results)

        # propagate metadata, if present
        res.mdh = MetaDataHandler.NestedClassMDHandler(
            getattr(inp, 'mdh', None))

        res.mdh['EnsembleFitROIs.FitType'] = self.fit_type
        res.mdh[
            'EnsembleFitROIs.EnsembleParameterGuess'] = self.ensemble_parameter_guess
        res.mdh[
            'EnsembleFitROIs.HoldEnsembleParamConstant'] = self.hold_ensemble_parameter_constant

        namespace[self.outputName] = res

    @property
    def _fitter_choices(self):
        return list(region_fitters.ensemble_fitters.keys())  #FIXME???

    @property
    def default_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('inputName',
                         editor=CBEditor(choices=self._namespace_keys)),
                    Item('_'),
                    Item('fit_type',
                         editor=CBEditor(choices=self._fitter_choices)),
                    Item('_'),
                    Item('ensemble_parameter_guess'),
                    Item('_'),
                    Item('hold_ensemble_parameter_constant'),
                    Item('_'),
                    Item('outputName'),
                    buttons=['OK'])

    @property
    def pipeline_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(
            Item('fit_type', editor=CBEditor(choices=self._fitter_choices)),
            Item('_'),
            Item('ensemble_parameter_guess'),
            Item('_'),
            Item('hold_ensemble_parameter_constant'),
        )

    @property
    def dsview_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('fit_type',
                         editor=CBEditor(choices=self._fitter_choices)),
                    Item('_'),
                    Item('ensemble_parameter_guess'),
                    Item('_'),
                    Item('hold_ensemble_parameter_constant'),
                    buttons=['OK'])
Code example #18
class TestEnsembleParameters(ModuleBase):
    inputName = Input('profiles')

    fit_type = CStr(list(profile_fitters.ensemble_fitters.keys())[0])
    ensemble_test_values = DictStrList(
        {'psf_fwhm': [30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80]})

    outputName = Output('fit_results')

    def execute(self, namespace):

        inp = namespace[self.inputName]

        # generate LineProfileHandler from tables
        handler = LineProfileHandler()
        handler._load_profiles_from_list(inp)

        fit_class = profile_fitters.ensemble_fitters[self.fit_type]
        fitter = fit_class(handler)

        results = fitter.ensemble_test(self.ensemble_test_values)
        res = tabular.RecArraySource(results)

        # propagate metadata, if present
        res.mdh = MetaDataHandler.NestedClassMDHandler(
            getattr(inp, 'mdh', None))

        res.mdh['TestEnsembleParameters.FitType'] = self.fit_type
        res.mdh[
            'TestEnsembleParameters.EnsembleTestValues'] = self.ensemble_test_values

        namespace[self.outputName] = res

    @property
    def _fitter_choices(self):
        return list(profile_fitters.ensemble_fitters.keys())

    @property
    def default_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(Item('inputName',
                         editor=CBEditor(choices=self._namespace_keys)),
                    Item('_'),
                    Item('fit_type',
                         editor=CBEditor(choices=self._fitter_choices)),
                    Item('_'),
                    Item(''),
                    Item('ensemble_test_values'),
                    Item(''),
                    Item('outputName'),
                    buttons=['OK'])

    @property
    def pipeline_view(self):
        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        return View(
            Item('fit_type', editor=CBEditor(choices=self._fitter_choices)),
            Item('_'),
            Item(''),
            Item('ensemble_test_values'),
        )
Code example #19
class PointCloudRenderLayer(EngineLayer):
    """
    A layer for viewing point-cloud data, using one of 3 engines (indicated above)
    
    """

    # properties to show in the GUI. Note that we also inherit 'visible' from BaseLayer
    vertexColour = CStr('', desc='Name of variable used to colour our points')
    point_size = Float(30.0, desc='Rendered size of the points in nm')
    cmap = Enum(*cm.cmapnames,
                default='gist_rainbow',
                desc='Name of colourmap used to colour points')
    clim = ListFloat(
        [0, 1],
        desc='How our variable should be scaled prior to colour mapping')
    alpha = Float(1.0, desc='Point transparency')
    method = Enum(*ENGINES.keys(), desc='Method used to display points')
    dsname = CStr(
        'output',
        desc=
        'Name of the datasource within the pipeline to use as a source of points'
    )
    _datasource_keys = List()
    _datasource_choices = List()

    def __init__(self,
                 pipeline,
                 method='points',
                 dsname='',
                 context=None,
                 **kwargs):
        EngineLayer.__init__(self, context=context, **kwargs)
        self._pipeline = pipeline
        self.engine = None
        self.cmap = 'gist_rainbow'

        self.x_key = 'x'  #TODO - make these traits?
        self.y_key = 'y'
        self.z_key = 'z'

        self.xn_key = 'xn'
        self.yn_key = 'yn'
        self.zn_key = 'zn'

        self._bbox = None

        # define a signal so that people can be notified when we are updated (currently used to force a redraw when
        # parameters change)
        self.on_update = dispatch.Signal()

        # define responses to changes in various traits
        self.on_trait_change(self._update, 'vertexColour')
        self.on_trait_change(lambda: self.on_update.send(self), 'visible')
        self.on_trait_change(self.update,
                             'cmap, clim, alpha, dsname, point_size')
        self.on_trait_change(self._set_method, 'method')

        # update any of our traits which were passed as command line arguments
        self.set(**kwargs)

        # update datasource name and method
        #logger.debug('Setting dsname and method')
        self.dsname = dsname
        self.method = method

        self._set_method()

        # if we were given a pipeline, connect ourselves to the onRebuild signal so that we can automatically update
        # ourselves
        if not self._pipeline is None:
            self._pipeline.onRebuild.connect(self.update)

    @property
    def datasource(self):
        """
        Return the datasource we are connected to (through our dsname property).
        """
        return self._pipeline.get_layer_data(self.dsname)

    def _set_method(self):
        #logger.debug('Setting layer method to %s' % self.method)
        self.engine = ENGINES[self.method](self._context)
        self.update()

    def _get_cdata(self):
        try:
            cdata = self.datasource[self.vertexColour]
        except KeyError:
            cdata = np.array([0, 1])

        return cdata

    def _update(self, *args, **kwargs):
        cdata = self._get_cdata()
        self.clim = [float(cdata.min()), float(cdata.max())]
        #self.update(*args, **kwargs)

    def update(self, *args, **kwargs):
        print('lw update')
        self._datasource_choices = self._pipeline.layer_data_source_names
        if not self.datasource is None:
            self._datasource_keys = sorted(self.datasource.keys())

        if not (self.engine is None or self.datasource is None):
            self.update_from_datasource(self.datasource)
            self.on_update.send(self)

    @property
    def bbox(self):
        return self._bbox

    def update_from_datasource(self, ds):
        x, y = ds[self.x_key], ds[self.y_key]

        if not self.z_key is None:
            try:
                z = ds[self.z_key]
            except KeyError:
                z = 0 * x
        else:
            z = 0 * x

        if not self.vertexColour == '':
            c = ds[self.vertexColour]
        else:
            c = 0 * x

        if self.xn_key in ds.keys():
            xn, yn, zn = ds[self.xn_key], ds[self.yn_key], ds[self.zn_key]
            self.update_data(x,
                             y,
                             z,
                             c,
                             cmap=getattr(cm, self.cmap),
                             clim=self.clim,
                             alpha=self.alpha,
                             xn=xn,
                             yn=yn,
                             zn=zn)
        else:
            self.update_data(x,
                             y,
                             z,
                             c,
                             cmap=getattr(cm, self.cmap),
                             clim=self.clim,
                             alpha=self.alpha)

    def update_data(self,
                    x=None,
                    y=None,
                    z=None,
                    colors=None,
                    cmap=None,
                    clim=None,
                    alpha=1.0,
                    xn=None,
                    yn=None,
                    zn=None):
        self._vertices = None
        self._normals = None
        self._colors = None
        self._color_map = None
        self._color_limit = 0
        self._alpha = 0
        if x is not None and y is not None and z is not None:
            vertices = np.vstack((x.ravel(), y.ravel(), z.ravel()))
            vertices = vertices.T.ravel().reshape(len(x.ravel()), 3)

            if not xn is None:
                normals = np.vstack(
                    (xn.ravel(), yn.ravel(),
                     zn.ravel())).T.ravel().reshape(len(x.ravel()), 3)
            else:
                normals = -0.69 * np.ones(vertices.shape)

            self._bbox = np.array(
                [x.min(), y.min(),
                 z.min(), x.max(),
                 y.max(), z.max()])
        else:
            vertices = None
            normals = None
            self._bbox = None

        if clim is not None and colors is not None and cmap is not None:
            cs_ = ((colors - clim[0]) / (clim[1] - clim[0]))
            cs = cmap(cs_)
            cs[:, 3] = alpha

            cs = cs.ravel().reshape(len(colors), 4)
        else:
            #cs = None
            if not vertices is None:
                cs = np.ones((vertices.shape[0], 4), 'f')
            else:
                cs = None
            color_map = None
            color_limit = None

        self.set_values(vertices, normals, cs, cmap, clim, alpha)

    def set_values(self,
                   vertices=None,
                   normals=None,
                   colors=None,
                   color_map=None,
                   color_limit=None,
                   alpha=None):
        if vertices is not None:
            self._vertices = vertices
        if normals is not None:
            self._normals = normals
        if color_map is not None:
            self._color_map = color_map
        if colors is not None:
            self._colors = colors
        if color_limit is not None:
            self._color_limit = color_limit
        if alpha is not None:
            self._alpha = alpha

    def get_vertices(self):
        return self._vertices

    def get_normals(self):
        return self._normals

    def get_colors(self):
        return self._colors

    def get_color_map(self):
        return self._color_map

    @property
    def colour_map(self):
        return self._color_map

    def get_color_limit(self):
        return self._color_limit

    @property
    def default_view(self):
        from traitsui.api import View, Item, Group, InstanceEditor, EnumEditor, TextEditor
        from PYME.ui.custom_traits_editors import HistLimitsEditor, CBEditor

        return View([
            Group([
                Item('dsname',
                     label='Data',
                     editor=EnumEditor(name='_datasource_choices')),
            ]),
            Item('method'),
            Item(
                'vertexColour',
                editor=EnumEditor(name='_datasource_keys'),
                label='Colour',
                visible_when='cmap not in ["R", "G", "B", "C", "M","Y", "K"]'),
            Group(
                [
                    Item('clim',
                         editor=HistLimitsEditor(data=self._get_cdata,
                                                 update_signal=self.on_update),
                         show_label=False),
                ],
                visible_when='cmap not in ["R", "G", "B", "C", "M","Y", "K"]'),
            Group(
                Item('cmap', label='LUT'),
                Item('alpha',
                     visible_when=
                     "method in ['pointsprites', 'transparent_points']",
                     editor=TextEditor(auto_set=False,
                                       enter_set=True,
                                       evaluate=float)),
                Item('point_size',
                     editor=TextEditor(auto_set=False,
                                       enter_set=True,
                                       evaluate=float)))
        ])
        #buttons=['OK', 'Cancel'])

    def default_traits_view(self):
        return self.default_view
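
The colour handling in update_data rescales the scalar channel with clim and then maps it through a colormap. The same mapping in isolation, using matplotlib's colormaps in place of the cm module imported by this file (made-up values):

import numpy as np
from matplotlib import cm as mpl_cm

c = np.array([0.0, 5.0, 10.0])  # per-point scalar (made up)
clim = [0.0, 10.0]

cs = mpl_cm.gist_rainbow((c - clim[0]) / (clim[1] - clim[0]))  # (3, 4) RGBA array
cs[:, 3] = 0.5                                                 # override alpha
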
Code example #20
File: tracks.py Project: zacsimile/python-microscopy
class TrackRenderLayer(EngineLayer):
    """
    A layer for viewing tracking data

    """

    # properties to show in the GUI. Note that we also inherit 'visible' from BaseLayer
    vertexColour = CStr('', desc='Name of variable used to colour our points')
    cmap = Enum(*cm.cmapnames,
                default='gist_rainbow',
                desc='Name of colourmap used to colour points')
    clim = ListFloat(
        [0, 1],
        desc='How our variable should be scaled prior to colour mapping')
    alpha = Float(1.0, desc='Transparency')
    line_width = Float(1.0, desc='Track line width')
    method = Enum(*ENGINES.keys(), desc='Method used to display tracks')
    clump_key = CStr('clumpIndex',
                     desc="Name of column containing the track identifier")
    dsname = CStr(
        'output',
        desc=
        'Name of the datasource within the pipeline to use as a source of points'
    )
    _datasource_keys = List()
    _datasource_choices = List()

    def __init__(self,
                 pipeline,
                 method='tracks',
                 dsname='',
                 context=None,
                 **kwargs):
        EngineLayer.__init__(self, context=context, **kwargs)
        self._pipeline = pipeline
        self.engine = None
        self.cmap = 'gist_rainbow'

        self.x_key = 'x'  #TODO - make these traits?
        self.y_key = 'y'
        self.z_key = 'z'

        self._bbox = None

        # define a signal so that people can be notified when we are updated (currently used to force a redraw when
        # parameters change)
        self.on_update = dispatch.Signal()

        # define responses to changes in various traits
        self.on_trait_change(self._update, 'vertexColour')
        self.on_trait_change(lambda: self.on_update.send(self), 'visible')
        self.on_trait_change(self.update,
                             'cmap, clim, alpha, dsname, clump_key')
        self.on_trait_change(self._set_method, 'method')

        # update any of our traits which were passed as command line arguments
        self.set(**kwargs)

        # update datasource name and method
        #logger.debug('Setting dsname and method')
        self.dsname = dsname
        self.method = method

        self._set_method()

        # if we were given a pipeline, connect ourselves to the onRebuild signal so that we can automatically update
        # ourselves
        if not self._pipeline is None:
            self._pipeline.onRebuild.connect(self.update)

    @property
    def datasource(self):
        """
        Return the datasource we are connected to (through our dsname property).
        """
        return self._pipeline.get_layer_data(self.dsname)

    def _set_method(self):
        #logger.debug('Setting layer method to %s' % self.method)
        self.engine = ENGINES[self.method](self._context)
        self.update()

    def _get_cdata(self):
        try:
            if isinstance(self.datasource, ClumpManager):
                cdata = []
                for track in self.datasource.all:
                    cdata.extend(track[self.vertexColour])
                cdata = np.array(cdata)
            else:
                # Assume tabular dataset
                cdata = self.datasource[self.vertexColour]
        except KeyError:
            cdata = np.array([0, 1])

        return cdata

    def _update(self, *args, **kwargs):
        cdata = self._get_cdata()
        self.clim = [float(np.nanmin(cdata)), float(np.nanmax(cdata))]
        #self.update(*args, **kwargs)

    def update(self, *args, **kwargs):
        print('lw update')
        self._datasource_choices = self._pipeline.layer_data_source_names
        if not self.datasource is None:
            if isinstance(self.datasource, ClumpManager):
                # Grab the keys from the first Track in the ClumpManager
                self._datasource_keys = sorted(self.datasource[0].keys())
            else:
                # Assume we have a tabular data source
                self._datasource_keys = sorted(self.datasource.keys())

        if not (self.engine is None or self.datasource is None):
            self.update_from_datasource(self.datasource)
            self.on_update.send(self)

    @property
    def bbox(self):
        return self._bbox

    def update_from_datasource(self, ds):
        if isinstance(ds, ClumpManager):
            x = []
            y = []
            z = []
            c = []
            self.clumpSizes = []

            # Copy data from tracks. This is already in clump order
            # thanks to ClumpManager
            for track in ds.all:
                x.extend(track['x'])
                y.extend(track['y'])
                z.extend(track['z'])
                self.clumpSizes.append(track.nEvents)

                if not self.vertexColour == '':
                    c.extend(track[self.vertexColour])
                else:
                    c.extend([0 for i in track['x']])

            x = np.array(x)
            y = np.array(y)
            z = np.array(z)
            c = np.array(c)

            # print(x,y,z,c)
            # print(x.shape,y.shape,z.shape,c.shape)

        else:
            # Assume tabular data source
            x, y = ds[self.x_key], ds[self.y_key]

            if not self.z_key is None:
                try:
                    z = ds[self.z_key]
                except KeyError:
                    z = 0 * x
            else:
                z = 0 * x

            if not self.vertexColour == '':
                c = ds[self.vertexColour]
            else:
                c = 0 * x

            # Work out clump start and finish indices
            # TODO - optimize / precompute????
            ci = ds[self.clump_key]

            NClumps = int(ci.max())

            clist = [[] for i in range(NClumps)]
            for i, cl_i in enumerate(ci):
                clist[int(cl_i - 1)].append(i)

            # This and self.clumpStarts are class attributes for
            # compatibility with the old Track rendering layer,
            # PYME.LMVis.gl_render3D.TrackLayer
            self.clumpSizes = [len(cl_i) for cl_i in clist]

            #reorder x, y, z, c in clump order
            I = np.hstack([np.array(cl) for cl in clist]).astype(int)

            x = x[I]
            y = y[I]
            z = z[I]
            c = c[I]

        self.clumpStarts = np.cumsum([
            0,
        ] + self.clumpSizes)

        #do normal vertex stuff
        vertices = np.vstack((x.ravel(), y.ravel(), z.ravel()))
        vertices = vertices.T.ravel().reshape(len(x.ravel()), 3)

        self._vertices = vertices
        self._normals = -0.69 * np.ones(vertices.shape)
        self._bbox = np.array(
            [x.min(), y.min(),
             z.min(), x.max(),
             y.max(), z.max()])

        clim = self.clim
        cmap = getattr(cm, self.cmap)

        if clim is not None:
            cs_ = ((c - clim[0]) / (clim[1] - clim[0]))
            cs = cmap(cs_)
            cs[:, 3] = float(self.alpha)

            self._colors = cs.ravel().reshape(len(c), 4)
        else:
            if not vertices is None:
                self._colors = np.ones((vertices.shape[0], 4), 'f')

        self._color_map = cmap
        self._color_limit = clim
        self._alpha = float(self.alpha)

    def get_vertices(self):
        return self._vertices

    def get_normals(self):
        return self._normals

    def get_colors(self):
        return self._colors

    def get_color_map(self):
        return self._color_map

    @property
    def colour_map(self):
        return self._color_map

    def get_color_limit(self):
        return self._color_limit

    @property
    def default_view(self):
        from traitsui.api import View, Item, Group, InstanceEditor, EnumEditor, TextEditor
        from PYME.ui.custom_traits_editors import HistLimitsEditor, CBEditor

        return View([
            Group([
                Item('dsname',
                     label='Data',
                     editor=EnumEditor(name='_datasource_choices')),
            ]),
            Item('method'),
            Item(
                'vertexColour',
                editor=EnumEditor(name='_datasource_keys'),
                label='Colour',
                visible_when='cmap not in ["R", "G", "B", "C", "M","Y", "K"]'),
            Group(
                [
                    Item('clim',
                         editor=HistLimitsEditor(data=self._get_cdata,
                                                 update_signal=self.on_update),
                         show_label=False),
                ],
                visible_when='cmap not in ["R", "G", "B", "C", "M","Y", "K"]'),
            Group(
                Item('cmap', label='LUT'),
                Item('alpha',
                     editor=TextEditor(auto_set=False,
                                       enter_set=True,
                                       evaluate=float)), Item('line_width'))
        ])
        #buttons=['OK', 'Cancel'])

    def default_traits_view(self):
        return self.default_view
Code example #21
class ImageUpload(OutputModule):
    """
    Upload a PYME ImageStack to an OMERO server, optionally
    attaching localization files.

    Parameters
    ----------
    input_image : str
        name of image in the recipe namespace to upload
    input_localization_attachments : dict
        maps tabular types (keys) to attachment filenames (values). Tabulars
        will be saved as '.hdf' files and attached to the image
    filePattern : str
        pattern to determine name of image on OMERO server. 'file_stub' will be
        set automatically.
    omero_dataset : str
        name of OMERO dataset to add the image to. If the dataset does not
        already exist it will be created. Can use sample metadata entries
        using {format} syntax
    omero_project : str
        name of OMERO project to link the dataset to. If the project does not
        already exist it will be created. Can use sample metadata entries
        using {format} syntax
    
    Notes
    -----
    OMERO server address and user login information must be stored in the user
    PYME config directory under plugins/config/pyme-omero, e.g. 
    /Users/Andrew/.PYME/plugins/config/pyme-omero. The file should be yaml
    formatted with the following keys:
        user
        password
        address
        port [optional]
    
    The project/image/dataset will be owned by the user set in the yaml file.
    """

    input_image = Input('')
    input_localization_attachments = DictStrStr()

    filePattern = '{file_stub}.tif'

    scheme = Enum(['OMERO'])

    omero_project = CStr('')
    omero_dataset = CStr('{Sample.SlideRef}')

    def _save(self, image, path):
        # force tif extension
        path = os.path.splitext(path)[0] + '.tif'
        image.Save(path)

    def save(self, namespace, context={}):
        """
        Parameters
        ----------
        namespace : dict
            The recipe namespace
        context : dict
            Information about the source file to allow pattern substitution to 
            generate the output name. At least 'file_stub' (which is the 
            filename without any extension) should be resolved.

        """
        from pyme_omero import core
        from tempfile import TemporaryDirectory

        out_filename = self.filePattern.format(**context)

        im = namespace[self.input_image]

        if hasattr(im, 'mdh'):
            sample = Sample()  # hack around our md keys having periods in them
            for k in [k for k in im.mdh.keys() if k.startswith('Sample.')]:
                setattr(sample, k.split('Sample.')[-1], im.mdh[k])
            sample_md = dict(Sample=sample)
        else:
            sample_md = {}

        dataset = self.omero_dataset.format(**sample_md)
        project = self.omero_project.format(**sample_md)

        with TemporaryDirectory() as temp_dir:
            out_filename = os.path.join(temp_dir, out_filename)
            self._save(im, out_filename)

            loc_filenames = []
            for loc_key, loc_stub in self.input_localization_attachments.items():
                loc_filename = os.path.join(temp_dir, loc_stub)
                if os.path.splitext(loc_filename)[-1] == '':
                    # default to hdf unless h5r is manually specified
                    loc_filename = loc_filename + '.hdf'
                loc_filenames.append(loc_filename)
                try:
                    mdh = namespace[loc_key].mdh
                except AttributeError:
                    mdh = None
                namespace[loc_key].to_hdf(loc_filename, loc_key, metadata=mdh)

            image_id = core.upload_image_from_file(out_filename, dataset,
                                                   project, loc_filenames)

        # if an h5r file is the principal input, upload it
        try:
            principle = os.path.join(context['input_dir'],
                                     context['file_stub']) + '.h5r'
            with core.local_or_named_temp_filename(principle) as f:
                core.connect_and_upload_file_annotation(
                    image_id, f, namespace='pyme.localizations')
        except (KeyError, IOError):
            pass

    @property
    def inputs(self):
        return set(self.input_localization_attachments.keys()).union(
            set([self.input_image]))

    @property
    def default_view(self):
        import wx
        if wx.GetApp() is None:
            return None

        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import DictChoiceStrEditor, CBEditor

        inputs, outputs, params = self.get_params()

        return View([
            Item(name='input_image',
                 editor=CBEditor(choices=self._namespace_keys)),
        ] + [
            Item(name='input_localization_attachments',
                 editor=DictChoiceStrEditor(choices=self._namespace_keys)),
        ] + [
            Item('_'),
        ] + self._view_items(params),
                    buttons=['OK', 'Cancel'])

    @property
    def pipeline_view(self):
        return self.default_view

    @property
    def no_localization_view(self):
        import wx
        if wx.GetApp() is None:
            return None

        from traitsui.api import View, Item
        from PYME.ui.custom_traits_editors import CBEditor

        inputs, outputs, params = self.get_params()

        return View([
            Item(name='input_image',
                 editor=CBEditor(choices=self._namespace_keys)),
        ] + [
            Item('_'),
        ] + self._view_items(params),
                    buttons=['OK', 'Cancel'])
Code example #22
class ImageRenderLayer(EngineLayer):
    """
    Layer for viewing images.
    """
    # properties to show in the GUI. Note that we also inherit 'visible' from BaseLayer
    cmap = Enum(*cm.cmapnames, default='gray', desc='Name of colourmap used to colour faces')
    clim = ListFloat([0, 1], desc='How our data should be scaled prior to colour mapping')
    alpha = Float(1.0, desc='Transparency')
    method = Enum(*ENGINES.keys(), desc='Method used to display image')
    dsname = CStr('output', desc='Name of the datasource within the pipeline to use as an image')
    channel = Int(0)
    slice = Int(0)
    z_pos = Float(0)
    _datasource_choices = List()
    _datasource_keys = List()

    def __init__(self, pipeline, method='image', dsname='', display_opts=None, context=None, **kwargs):
        EngineLayer.__init__(self, context=context, **kwargs)
        self._pipeline = pipeline
        self.engine = None
        self.cmap = 'gray'

        self._bbox = None
        self._do = display_opts #a dh5view display_options instance - if provided, this over-rides the clim, cmap properties
        
        self._im_key = None

        # define a signal so that people can be notified when we are updated (currently used to force a redraw when
        # parameters change)
        self.on_update = dispatch.Signal()

        # define responses to changes in various traits
        #self.on_trait_change(self._update, 'vertexColour')
        self.on_trait_change(lambda: self.on_update.send(self), 'visible')
        self.on_trait_change(self.update, 'cmap, clim, alpha, dsname')
        self.on_trait_change(self._set_method, 'method')

        # update any of our traits which were passed as command line arguments
        self.set(**kwargs)

        # update datasource and method
        self.dsname = dsname
        if self.method == method:
            #make sure we still call _set_method even if we start with the default method
            self._set_method()
        else:
            self.method = method

        # if we were given a pipeline, connect ourselves to the onRebuild signal so that we can automatically update
        # ourselves
        if (not self._pipeline is None) and hasattr(pipeline, 'onRebuild'):
            self._pipeline.onRebuild.connect(self.update)

    @property
    def datasource(self):
        """
        Return the datasource we are connected to (does not go through the pipeline for triangles_mesh).
        """
        try:
            return self._pipeline.get_layer_data(self.dsname)
        except AttributeError:
            #fallback if pipeline is a dictionary
            return self._pipeline[self.dsname]
        #return self.datasource
    
    @property
    def _ds_class(self):
        # from PYME.experimental import triangle_mesh
        from PYME.IO import image
        return image.ImageStack

    def _set_method(self):
        self.engine = ENGINES[self.method](self._context)
        self.update()


    # def _update(self, *args, **kwargs):
    #     #pass
    #     cdata = self._get_cdata()
    #     self.clim = [float(cdata.min()), float(cdata.max())]
    #     self.update(*args, **kwargs)

    def update(self, *args, **kwargs):
        try:
            self._datasource_choices = [k for k, v in self._pipeline.dataSources.items() if isinstance(v, self._ds_class)]
        except AttributeError:
            self._datasource_choices = [k for k, v in self._pipeline.items() if
                                        isinstance(v, self._ds_class)]
        
        if not (self.engine is None or self.datasource is None):
            print('lw update')
            self.update_from_datasource(self.datasource)
            self.on_update.send(self)

    @property
    def bbox(self):
        return self._bbox
    
    def sync_to_display_opts(self, do=None):
        if (do is None):
            if not (self._do is None):
                do = self._do
            else:
                return

        o = do.Offs[self.channel]
        g = do.Gains[self.channel]
        clim = [o, o + 1.0 / g]

        cmap = do.cmaps[self.channel].name
        visible = do.show[self.channel]
        
        self.set(clim=clim, cmap=cmap, visible=visible)
        

    def update_from_datasource(self, ds):
        """

        Parameters
        ----------
        ds :
            PYME.IO.image.ImageStack object

        Returns
        -------
        None
        """

        
        #if self._do is not None:
            # Let display options (if provided) over-ride our settings (TODO - is this the right way to do this?)
        #    o = self._do.Offs[self.channel]
        #    g = self._do.Gains[self.channel]
        #    clim = [o, o + 1.0/g]
            #self.clim = clim
            
        #    cmap = self._do.cmaps[self.channel]
            #self.visible = self._do.show[self.channel]
        #else:
        
        clim = self.clim
        cmap = getattr(cm, self.cmap)
            
        alpha = float(self.alpha)
        
        c0, c1 = clim
        
        im_key = (self.dsname, self.slice, self.channel)
        
        if not self._im_key == im_key:
            self._im_key = im_key
            self._im = ds.data[:,:,self.slice, self.channel].astype('f4')# - c0)/(c1-c0)
        
            x0, y0, x1, y1, _, _ = ds.imgBounds.bounds

            self._bbox = np.array([x0, y0, 0, x1, y1, 0])
        
            self._bounds = [x0, y0, x1, y1]
            
        self._alpha = alpha
        self._color_map = cmap
        self._color_limit = clim

    def get_color_map(self):
        return self._color_map

    @property
    def colour_map(self):
        return self._color_map

    def get_color_limit(self):
        return self._color_limit
    
    def _get_cdata(self):
        return self._im.ravel()[::20]

    @property
    def default_view(self):
        from traitsui.api import View, Item, Group, InstanceEditor, EnumEditor
        from PYME.ui.custom_traits_editors import HistLimitsEditor, CBEditor

        return View([Group([Item('dsname', label='Data', editor=EnumEditor(name='_datasource_choices')), ]),
                     #Item('method'),
                     Group([Item('clim', editor=HistLimitsEditor(data=self._get_cdata), show_label=False), ]),
                     Group([Item('cmap', label='LUT'),
                            Item('alpha', visible_when='method in ["flat", "tessel"]')
                            ])
                     ], )
        # buttons=['OK', 'Cancel'])

    def default_traits_view(self):
        return self.default_view
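
The two details worth pulling out of ImageRenderLayer are the offset/gain to colour-limit mapping in sync_to_display_opts, and the (dsname, slice, channel) key that stops update_from_datasource from re-slicing the stack when nothing relevant has changed. The following is a minimal standalone sketch of both ideas in plain numpy; clim_from_display_opts and SliceCache are illustrative names, not part of PYME.

import numpy as np

def clim_from_display_opts(offset, gain):
    # same relationship as sync_to_display_opts: the displayed range maps to [offset, offset + 1/gain]
    return [offset, offset + 1.0 / gain]

class SliceCache:
    """Re-slice an (x, y, z, c) array only when the requested (slice, channel) key changes."""
    def __init__(self, data):
        self._data = data
        self._key = None
        self._im = None

    def get(self, z_slice, channel):
        key = (z_slice, channel)
        if key != self._key:
            self._key = key
            self._im = self._data[:, :, z_slice, channel].astype('f4')
        return self._im

stack = np.random.rand(32, 32, 5, 2)
cache = SliceCache(stack)
print(clim_from_display_opts(0.1, 2.0))  # [0.1, 0.6]
print(cache.get(2, 0).shape)             # (32, 32)
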
コード例 #23
0
class VertexRenderLayer(BaseLayer):
    vertexColour = CStr('')

    def __init__(self,
                 x=None,
                 y=None,
                 z=None,
                 colors=None,
                 color_map=None,
                 color_limit=None,
                 alpha=1.0):
        """
        This creates a new RenderLayer object and parses given data.

        If the parameters are None, they are not processed. There are groups. So if x is
        given, but y is None, both won't be processed.
        There are the following groups:
        (x,y,z)
        (colors, color_map)
        Parameters
        ----------
        x           x_values of the points
        y           y_values of the points
        z           z_values of the points
        colors      color values of the points
        color_map   color map that should be used
        color_limit limits of the color map
        alpha       alpha of the points
        """
        self.x_key = 'x'
        self.y_key = 'y'
        self.z_key = 'z'
        #self.color_key = None

        self._bbox = None

        BaseLayer.__init__(self)
        self.update_data(x, y, z, colors, color_map, color_limit, alpha)
        self.set_shader_program(DefaultShaderProgram)

    @property
    def bbox(self):
        return self._bbox

    def update_from_datasource(self, ds, cmap=None, clim=None, alpha=1.0):
        x, y = ds[self.x_key], ds[self.y_key]

        if self.z_key is not None:
            z = ds[self.z_key]
        else:
            z = 0 * x

        if self.vertexColour != '':
            c = ds[self.vertexColour]
        else:
            c = None

        self.update_data(x, y, z, c, cmap=cmap, clim=clim, alpha=alpha)

    def update_data(self,
                    x=None,
                    y=None,
                    z=None,
                    colors=None,
                    cmap=None,
                    clim=None,
                    alpha=1.0):
        self._vertices = None
        self._normals = None
        self._colors = None
        self._color_map = None
        self._color_limit = 0
        self._alpha = 0
        if x is not None and y is not None and z is not None:
            vertices = np.vstack((x.ravel(), y.ravel(), z.ravel()))
            vertices = vertices.T.ravel().reshape(len(x.ravel()), 3)
            normals = -0.69 * np.ones(vertices.shape)

            self._bbox = np.array(
                [x.min(), y.min(),
                 z.min(), x.max(),
                 y.max(), z.max()])
        else:
            vertices = None
            normals = None
            self._bbox = None

        if colors is not None and cmap is not None and clim is not None:
            cs_ = ((colors - clim[0]) / (clim[1] - clim[0]))
            cs = cmap(cs_)
            cs[:, 3] = alpha

            cs = cs.ravel().reshape(len(colors), 4)
        else:
            # no colour data / colour map supplied - fall back to constant white
            if vertices is not None:
                cs = np.ones((vertices.shape[0], 4), 'f')
            else:
                cs = None

        self.set_values(vertices, normals, cs, cmap, clim, alpha)

    @abstractmethod
    def render(self, gl_canvas):
        if self.get_vertices() is None:
            print('Tried to render with Null vertices, aborting')
            return

        self.shader_program.xmin, self.shader_program.xmax = gl_canvas.bounds[
            'x']
        self.shader_program.ymin, self.shader_program.ymax = gl_canvas.bounds[
            'y']
        self.shader_program.zmin, self.shader_program.zmax = gl_canvas.bounds[
            'z']
        self.shader_program.vmin, self.shader_program.vmax = gl_canvas.bounds[
            'v']

        with self.shader_program:

            n_vertices = self.get_vertices().shape[0]

            glVertexPointerf(self.get_vertices())
            glNormalPointerf(self.get_normals())
            glColorPointerf(self.get_colors())

            glDrawArrays(GL_TRIANGLES, 0, n_vertices)

    def set_values(self,
                   vertices=None,
                   normals=None,
                   colors=None,
                   color_map=None,
                   color_limit=None,
                   alpha=None):

        if vertices is not None:
            self._vertices = vertices
        if normals is not None:
            self._normals = normals
        if color_map is not None:
            self._color_map = color_map
        if colors is not None:
            self._colors = colors
        if color_limit is not None:
            self._color_limit = color_limit
        if alpha is not None:
            self._alpha = alpha

    def get_vertices(self):
        return self._vertices

    def get_normals(self):
        return self._normals

    def get_colors(self):
        return self._colors

    def get_color_map(self):
        return self._color_map

    @property
    def colour_map(self):
        return self._color_map

    def get_color_limit(self):
        return self._color_limit

    def view(self, ds_keys):
        from traitsui.api import View, Item, Group, EnumEditor
        from PYME.ui.custom_traits_editors import CBEditor

        return View([
            Item('vertexColour',
                 editor=EnumEditor(values=ds_keys),
                 label='Colour'),
        ])
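
update_data maps per-vertex colour values to RGBA by normalising against clim, applying the colour map and then forcing the alpha channel. A small standalone sketch of just that step, assuming a matplotlib colour map and skipping the shader/GL plumbing (vertex_colours is an illustrative helper, not part of PYME):

import numpy as np
import matplotlib.cm as cm

def vertex_colours(values, cmap, clim, alpha=1.0):
    # normalise values to [0, 1] over the colour limits, map to RGBA, then overwrite alpha
    cs_ = (values - clim[0]) / (clim[1] - clim[0])
    cs = cmap(cs_)
    cs[:, 3] = alpha
    return cs.astype('f4')

values = np.linspace(0, 10, 5)
print(vertex_colours(values, cm.viridis, clim=[0, 10], alpha=0.5))
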
コード例 #24
0
class TriangleRenderLayer(EngineLayer):
    """
    Layer for viewing triangle meshes.
    """
    # properties to show in the GUI. Note that we also inherit 'visible' from BaseLayer
    vertexColour = CStr('constant', desc='Name of variable used to colour our points')
    cmap = Enum(*cm.cmapnames, default='gist_rainbow', desc='Name of colourmap used to colour faces')
    clim = ListFloat([0, 1], desc='How our variable should be scaled prior to colour mapping')
    alpha = Float(1.0, desc='Face transparency')
    method = Enum(*ENGINES.keys(), desc='Method used to display faces')
    normal_mode = Enum(['Per vertex', 'Per face'])
    dsname = CStr('output', desc='Name of the datasource within the pipeline to use as a source of triangles (should be a TriangularMesh object)')
    _datasource_choices = List()
    _datasource_keys = List()

    def __init__(self, pipeline, method='wireframe', dsname='', context=None, **kwargs):
        EngineLayer.__init__(self, context=context, **kwargs)
        self._pipeline = pipeline
        self.engine = None
        self.cmap = 'gist_rainbow'

        self.x_key = 'x'  # TODO - make these traits?
        self.y_key = 'y'
        self.z_key = 'z'

        self.xn_key = 'xn'
        self.yn_key = 'yn'
        self.zn_key = 'zn'

        self._bbox = None

        # define a signal so that people can be notified when we are updated (currently used to force a redraw when
        # parameters change)
        self.on_update = dispatch.Signal()

        # define responses to changes in various traits
        self.on_trait_change(self._update, 'vertexColour')
        self.on_trait_change(lambda: self.on_update.send(self), 'visible')
        self.on_trait_change(self.update, 'cmap, clim, alpha, dsname, normal_mode')
        self.on_trait_change(self._set_method, 'method')

        # update any of our traits which were passed as command line arguments
        self.set(**kwargs)

        # update datasource and method
        self.dsname = dsname
        if self.method == method:
            #make sure we still call _set_method even if we start with the default method
            self._set_method()
        else:
            self.method = method

        # if we were given a pipeline, connect ourselves to the onRebuild signal so that we can automatically update
        # ourselves
        if self._pipeline is not None:
            self._pipeline.onRebuild.connect(self.update)

    @property
    def datasource(self):
        """
        Return the datasource we are connected to (does not go through the pipeline for triangles_mesh).
        """
        return self._pipeline.get_layer_data(self.dsname)
        #return self.datasource
    
    @property
    def _ds_class(self):
        # from PYME.experimental import triangle_mesh
        from PYME.experimental import _triangle_mesh as triangle_mesh
        return triangle_mesh.TrianglesBase

    def _set_method(self):
        self.engine = ENGINES[self.method](self._context)
        self.update()

    def _get_cdata(self):
        try:
            cdata = self.datasource[self.vertexColour]
        except (KeyError, TypeError):
            cdata = np.array([0, 1])

        return cdata

    def _update(self, *args, **kwargs):
        #pass
        cdata = self._get_cdata()
        self.clim = [float(cdata.min()), float(cdata.max())]
        self.update(*args, **kwargs)

    def update(self, *args, **kwargs):
        self._datasource_choices = [k for k, v in self._pipeline.dataSources.items() if isinstance(v, self._ds_class)]
        
        if not self.datasource is None:
            dks = ['constant',]
            if hasattr(self.datasource, 'keys'):
                 dks = dks + sorted(self.datasource.keys())
            self._datasource_keys = dks
        
        if not (self.engine is None or self.datasource is None):
            print('lw update')
            self.update_from_datasource(self.datasource)
            self.on_update.send(self)

    @property
    def bbox(self):
        return self._bbox

    def update_from_datasource(self, ds):
        """
        Pulls vertices/normals from a triangular-mesh datasource and updates the layer's vertex, normal and colour buffers.

        Parameters
        ----------
        ds :
            PYME.experimental.triangular_mesh.TriangularMesh object

        Returns
        -------
        None
        """
        #t = ds.vertices[ds.faces]
        #n = ds.vertex_normals[ds.faces]
        
        x, y, z = ds.vertices[ds.faces].reshape(-1, 3).T
        
        if self.normal_mode == 'Per vertex':
            xn, yn, zn = ds.vertex_normals[ds.faces].reshape(-1, 3).T
        else:
            xn, yn, zn = np.repeat(ds.face_normals.T, 3, axis=1)
            
        if self.vertexColour in ['', 'constant']:
            c = np.ones(len(x))
            clim = [0, 1]
        #elif self.vertexColour == 'vertex_index':
        #    c = np.arange(0, len(x))
        else:
            c = ds[self.vertexColour][ds.faces].ravel()
            clim = self.clim

        cmap = getattr(cm, self.cmap)
        alpha = float(self.alpha)

        # Do we have coordinates? Concatenate into vertices.
        if x is not None and y is not None and z is not None:
            vertices = np.vstack((x.ravel(), y.ravel(), z.ravel()))
            self._vertices = vertices.T.ravel().reshape(len(x.ravel()), 3)

            if xn is not None:
                self._normals = np.vstack((xn.ravel(), yn.ravel(), zn.ravel())).T.ravel().reshape(len(x.ravel()), 3)
            else:
                self._normals = -0.69 * np.ones(self._vertices.shape)

            self._bbox = np.array([x.min(), y.min(), z.min(), x.max(), y.max(), z.max()])
        else:
            self._bbox = None

        # TODO: this temporarily gives all triangles a constant colour. The user should be able to select the colour.
        if c is None:
            c = np.ones(self._vertices.shape[0]) * 255  # constant colour vector

        if clim is not None and c is not None and cmap is not None:
            cs_ = ((c - clim[0]) / (clim[1] - clim[0]))
            cs = cmap(cs_)

            if self.method in ['flat', 'tessel']:
                alpha = cs_ * alpha
            
            cs[:, 3] = alpha
            
            if self.method == 'tessel':
                cs = np.power(cs, 0.333)

            self._colors = cs.ravel().reshape(len(c), 4)
        else:
            # cs = None
            if not self._vertices is None:
                self._colors = np.ones((self._vertices.shape[0], 4), 'f')
            
        self._alpha = alpha
        self._color_map = cmap
        self._color_limit = clim


    def get_vertices(self):
        return self._vertices

    def get_normals(self):
        return self._normals

    def get_colors(self):
        return self._colors

    def get_color_map(self):
        return self._color_map

    @property
    def colour_map(self):
        return self._color_map

    def get_color_limit(self):
        return self._color_limit

    @property
    def default_view(self):
        from traitsui.api import View, Item, Group, InstanceEditor, EnumEditor
        from PYME.ui.custom_traits_editors import HistLimitsEditor, CBEditor

        return View([Group([Item('dsname', label='Data', editor=EnumEditor(name='_datasource_choices')), ]),
                     Item('method'),
                     Item('normal_mode', visible_when='method=="shaded"'),
                     Item('vertexColour', editor=EnumEditor(name='_datasource_keys'), label='Colour'),
                     Group([Item('clim', editor=HistLimitsEditor(data=self._get_cdata), show_label=False), ], visible_when='vertexColour != "constant"'),
                     Group([Item('cmap', label='LUT'),
                            Item('alpha', visible_when='method in ["flat", "tessel"]')
                            ])
                     ], )
        # buttons=['OK', 'Cancel'])

    def default_traits_view(self):
        return self.default_view
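
update_from_datasource flattens the indexed mesh into flat per-corner arrays: vertices[faces] expands every face into its three corner vertices, and in 'Per face' mode each face normal is repeated once per corner. A numpy-only sketch of that reshaping on a toy two-triangle mesh (synthetic arrays, not a real TrianglesBase datasource):

import numpy as np

# toy mesh: 4 vertices, 2 triangular faces, both facing +z
vertices = np.array([[0., 0., 0.],
                     [1., 0., 0.],
                     [0., 1., 0.],
                     [1., 1., 0.]])
faces = np.array([[0, 1, 2],
                  [1, 3, 2]])
face_normals = np.array([[0., 0., 1.],
                         [0., 0., 1.]])

# expand faces into a flat list of corner coordinates (3 corners per face)
x, y, z = vertices[faces].reshape(-1, 3).T
print(x.shape)  # (6,)

# 'Per face' normals: repeat each face normal once per corner
xn, yn, zn = np.repeat(face_normals.T, 3, axis=1)
print(zn)  # [1. 1. 1. 1. 1. 1.]
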
コード例 #25
0
class ClusteringByLabel(ModuleBase):
    """

    Parameters
    ----------
    input_name : Input
        PYME.IO.ImageStack
    mask : Input
        PYME.IO.ImageStack. Optional mask to only calculate metrics

    Returns
    -------
    output_name = Output


    Notes
    -----

    """

    input_name = Input('input')

    mask = Input('')
    excitation_start_frame = Int(10)

    output_vom = CStr('')
    output_mean_pre_excitation = CStr('')
    output_name = Output('cluster_metrics')

    def execute(self, namespace):

        series = namespace[self.input_name]

        # squeeze down from 4D
        data = series.data[:, :, :].squeeze()
        if self.mask == '':  # not the most memory efficient, but make a mask
            logger.debug(
                'No mask provided to ClusteringByLabel, analyzing full image')
            mask = np.ones((data.shape[0], data.shape[1]), int)
        else:
            mask = namespace[self.mask].data[:, :, :].squeeze()
        # toss any negative labels, as well as the zero label (per PYME clustering schema).
        labels = sorted(list(set(np.clip(np.unique(mask), 0, None)) - {0}))
        print(labels)

        n_labels = len(labels)

        # calculate the Variance_t over Mean_t
        var = np.var(data[:, :, self.excitation_start_frame:], axis=2)
        mean = np.mean(data[:, :, self.excitation_start_frame:], axis=2)

        variance_over_mean = var / mean
        if np.isnan(variance_over_mean).any():
            logger.error('Variance over mean contains NaN, see %s' %
                         series.filename)

        mean_pre_excitation = np.mean(data[:, :, :self.excitation_start_frame],
                                      axis=2)

        cluster_metric_mean = np.zeros(n_labels)
        mean_before_excitation = np.zeros(n_labels)

        for li in range(n_labels):
            # everything is 2D at this point
            label_mask = mask == labels[li]
            cluster_metric_mean[li] = np.mean(variance_over_mean[label_mask])
            mean_before_excitation[li] = np.mean(
                mean_pre_excitation[label_mask])

        res = tabular.DictSource({
            'variance_over_mean': cluster_metric_mean,
            'mean_intensity_over_first_10_frames': mean_before_excitation,
            'labels': np.array(labels)
        })
        try:
            res.mdh = series.mdh
        except AttributeError:
            res.mdh = None

        namespace[self.output_name] = res

        if self.output_vom != '':
            namespace[self.output_vom] = image.ImageStack(
                data=variance_over_mean, mdh=res.mdh)

        if self.output_mean_pre_excitation != '':
            namespace[self.output_mean_pre_excitation] = image.ImageStack(
                data=mean_pre_excitation, mdh=res.mdh)
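
execute reduces the time axis first (per-pixel variance over mean after excitation starts, and per-pixel mean before it) and then averages each map over the pixels of every label. A standalone numpy sketch of that reduction on synthetic data, without the recipe machinery (per_label_metrics is an illustrative helper, not a PYME function):

import numpy as np

def per_label_metrics(data, mask, excitation_start_frame=10):
    """data: (x, y, t) array; mask: (x, y) integer label image, 0 = background."""
    labels = sorted(set(np.clip(np.unique(mask), 0, None)) - {0})

    # per-pixel temporal metrics
    var_over_mean = (np.var(data[:, :, excitation_start_frame:], axis=2) /
                     np.mean(data[:, :, excitation_start_frame:], axis=2))
    mean_pre = np.mean(data[:, :, :excitation_start_frame], axis=2)

    # average each per-pixel map over the pixels belonging to every label
    return {int(label): (np.mean(var_over_mean[mask == label]),
                         np.mean(mean_pre[mask == label]))
            for label in labels}

data = np.random.poisson(20, (16, 16, 50)).astype(float)
mask = np.zeros((16, 16), int)
mask[:8, :] = 1
mask[8:, :] = 2
print(per_label_metrics(data, mask))
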
コード例 #26
0
class LayerWrapper(HasTraits):
    cmap = Enum(*cm.cmapnames, default='gist_rainbow')
    clim = ListFloat([0, 1])
    alpha = Float(1.0)
    visible = Bool(True)
    method = Enum(*ENGINES.keys())
    engine = Instance(layers.BaseLayer)
    dsname = CStr('output')

    def __init__(self,
                 pipeline,
                 method='points',
                 ds_name='',
                 cmap='gist_rainbow',
                 clim=[0, 1],
                 alpha=1.0,
                 visible=True,
                 method_args={}):
        self._pipeline = pipeline
        #self._namespace=getattr(pipeline, 'namespace', {})
        #self.dsname = None
        self.engine = None

        self.cmap = cmap
        self.clim = clim
        self.alpha = alpha

        self.visible = visible

        self.on_update = dispatch.Signal()

        self.on_trait_change(lambda: self.on_update.send(self), 'visible')
        self.on_trait_change(self.update, 'cmap, clim, alpha, dsname')
        self.on_trait_change(self._set_method, 'method')

        #self.set_datasource(ds_name)
        self.dsname = ds_name

        self._eng_params = dict(method_args)
        self.method = method

        self._pipeline.onRebuild.connect(self.update)

    @property
    def _namespace(self):
        return self._pipeline.layer_datasources

    @property
    def bbox(self):
        return self.engine.bbox

    @property
    def colour_map(self):
        return self.engine.colour_map

    @property
    def data_source_names(self):
        names = []  #'']
        for k, v in self._namespace.items():
            names.append(k)
            if isinstance(v, tabular.ColourFilter):
                for c in v.getColourChans():
                    names.append('.'.join([k, c]))

        return names

    @property
    def datasource(self):
        if self.dsname == '':
            return self._pipeline

        parts = self.dsname.split('.')
        if len(parts) == 2:
            # special case - permit access to channels using dot notation
            # NB: only works if our underlying datasource is a ColourFilter
            ds, channel = parts
            return self._namespace.get(ds, None).get_channel_ds(channel)
        else:
            return self._namespace.get(self.dsname, None)

    def _set_method(self):
        if self.engine:
            self._eng_params = self.engine.get('point_size', 'vertexColour')
            #print(eng_params)

        self.engine = ENGINES[self.method](self._context)
        self.engine.set(**self._eng_params)
        self.engine.on_trait_change(self._update, 'vertexColour')
        self.engine.on_trait_change(self.update)

        self.update()

    # def set_datasource(self, ds_name):
    #     self._dsname = ds_name
    #
    #     self.update()

    def _update(self, *args, **kwargs):
        cdata = self._get_cdata()
        self.clim = [float(cdata.min()), float(cdata.max())]
        #self.update(*args, **kwargs)

    def update(self, *args, **kwargs):
        print('lw update')
        if not (self.engine is None or self.datasource is None):
            self.engine.update_from_datasource(self.datasource,
                                               getattr(cm, self.cmap),
                                               self.clim, self.alpha)
            self.on_update.send(self)

    def render(self, gl_canvas):
        if self.visible:
            self.engine.render(gl_canvas)

    def _get_cdata(self):
        try:
            cdata = self.datasource[self.engine.vertexColour]
        except KeyError:
            cdata = np.array([0, 1])

        return cdata

    @property
    def default_view(self):
        from traitsui.api import View, Item, Group, InstanceEditor, EnumEditor
        from PYME.ui.custom_traits_editors import HistLimitsEditor, CBEditor

        return View(
            [
                Group([
                    Item('dsname',
                         label='Data',
                         editor=EnumEditor(values=self.data_source_names)),
                ]),
                Item('method'),
                #Item('_'),
                Group([
                    Item('engine',
                         style='custom',
                         show_label=False,
                         editor=InstanceEditor(
                             view=self.engine.view(self.datasource.keys()))),
                ]),
                #Item('engine.color_key', editor=CBEditor(choices=self.datasource.keys())),
                Group([
                    Item('clim',
                         editor=HistLimitsEditor(data=self._get_cdata),
                         show_label=False),
                ]),
                Group([
                    Item('cmap', label='LUT'),
                    Item('alpha'),
                    Item('visible')
                ],
                      orientation='horizontal',
                      layout='flow')
            ], )
        #buttons=['OK', 'Cancel'])

    def default_traits_view(self):
        return self.default_view
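
The datasource property resolves names like 'clumps.chan0' by splitting on the dot and asking the underlying colour-aware source for that channel via get_channel_ds (a method the original code expects PYME's ColourFilter to provide). A hedged sketch of the same lookup against a plain dict namespace; resolve_datasource and FakeColourSource are stand-ins for illustration only.

def resolve_datasource(namespace, dsname, default=None):
    """Resolve 'name' or 'name.channel' keys from a dict-like namespace."""
    if dsname == '':
        return default

    parts = dsname.split('.')
    if len(parts) == 2:
        # dot notation: the named source must expose get_channel_ds()
        name, channel = parts
        return namespace[name].get_channel_ds(channel)

    return namespace.get(dsname, None)

class FakeColourSource:
    def get_channel_ds(self, channel):
        return 'data for %s' % channel

print(resolve_datasource({'clumps': FakeColourSource()}, 'clumps.chan0'))
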
コード例 #27
0
class HDBSCANClustering(ModuleBase):
    """
    Performs HDBSCAN clustering on input dictionary

    Parameters
    ----------

        minPtsForCore: The minimum size of clusters. Technically the only required parameter.
        searchRadius: Extract DBSCAN clustering based on search radius. Skipped if 0 or None.

    Notes
    -----

    See https://github.com/scikit-learn-contrib/hdbscan
    Lots of other parameters not mapped.

    """
    input_name = Input('filtered')
    # input_vert = Input('vert')
    columns = ListStr(['x', 'y'])
    search_radius = Float()
    min_clump_size = Int(100)
    clump_column_name = CStr('hdbscan_id')
    clump_prob_column_name = CStr('hdbscan_prob')
    clump_dbscan_column_name = CStr('dbscan_id')
    output_name = Output('hdbscan_clustered')

    def execute(self, namespace):

        # print('testing showpoints again')
        # print(namespace['showplots'])
        inp = namespace[self.input_name]
        mapped = tabular.mappingFilter(inp)
        # vert_data = namespace[self.input_vert]
        import hdbscan
        clusterer = hdbscan.HDBSCAN(min_cluster_size=self.min_clump_size)

        clusterer.fit(np.vstack([inp[k] for k in self.columns]).T)

        # Note that hdbscan gives unclustered points label of -1, and first value starts at 0.
        # shift hdbscan labels up by one to match existing convention that a clumpID of 0 corresponds to unclumped
        mapped.addColumn(str(self.clump_column_name), clusterer.labels_ + 1)
        mapped.addColumn(str(self.clump_prob_column_name),
                         clusterer.probabilities_)

        if self.search_radius is not None and self.search_radius > 0:
            #Extract dbscan clustering from hdbscan clusterer
            dbscan = clusterer.single_linkage_tree_.get_clusters(
                self.search_radius, self.min_clump_size)

            # shift dbscan labels up by one to match existing convention that a clumpID of 0 corresponds to unclumped
            mapped.addColumn(str(self.clump_dbscan_column_name), dbscan + 1)

        # propagate metadata, if present
        try:
            mapped.mdh = inp.mdh
            print('testing for mdh')
        except AttributeError:
            pass

        namespace[self.output_name] = mapped
        print('finished clustering')
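
For reference, a minimal standalone sketch of the hdbscan calls used above, outside the recipe framework (synthetic 2D points; requires the hdbscan package):

import numpy as np
import hdbscan

# two synthetic blobs plus some uniform background noise
rng = np.random.RandomState(0)
points = np.vstack([rng.normal(0, 1, (200, 2)),
                    rng.normal(10, 1, (200, 2)),
                    rng.uniform(-5, 15, (50, 2))])

clusterer = hdbscan.HDBSCAN(min_cluster_size=50)
clusterer.fit(points)

# shift labels up by one so that 0 means 'unclustered', as in the module above
clump_id = clusterer.labels_ + 1
clump_prob = clusterer.probabilities_

# optionally extract a DBSCAN-style clustering at a fixed search radius
dbscan_id = clusterer.single_linkage_tree_.get_clusters(2.0, min_cluster_size=50) + 1

print(np.unique(clump_id), np.unique(dbscan_id))
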