Code example #1
File: hypothesesgraph.py  Project: chaubold/hytra
    def pruneGraphToSolution(self, distanceToSolution=0):
        '''
        creates a new pruned HypothesesGraph around the result. Assumes that value==0 corresponds
        to unlabeled parts of the graph.
        distanceToSolution determines how many negative examples are included
        distanceToSolution = 0: only include negative edges that connect used objects
        distanceToSolution = 1: additionally include edges that connect used objects with unlabeled objects
        '''
        prunedGraph = HypothesesGraph()
        for n in self.nodeIterator():
            if 'value' in self._graph.node[n] and self._graph.node[n]['value'] > 0:
                prunedGraph._graph.add_node(n,**self._graph.node[n])

        for e in self.arcIterator():
            src = self.source(e)
            dest = self.target(e)
            if distanceToSolution == 0:
                if src in prunedGraph._graph and dest in prunedGraph._graph:
                    prunedGraph._graph.add_edge(src,dest,**self._graph.edge[src][dest])

        # TODO: can be optimized by looping over the pruned graph nodes (might sacrifice readability)
        for distance in range(1,distanceToSolution+1):
            for e in self.arcIterator():
                src = self.source(e)
                dest = self.target(e)
                if src in prunedGraph._graph or dest in prunedGraph._graph:
                    prunedGraph._graph.add_node(src,**self._graph.node[src])
                    prunedGraph._graph.add_node(dest,**self._graph.node[dest])
                    prunedGraph._graph.add_edge(src,dest,**self._graph.edge[src][dest])

        # in case a node is NOT an appearance and
        # has all the incoming edges with value 0, we remove all these incoming edges
        #
        # in case a node is NOT a disappearance and
        # has all the outgoing edges with value 0, we remove all these outgoing edges
        withAppearanceFeatures = True
        withDisappearanceFeatures = True
        withFeatures = True
        correctAppearanceFeatureLength = True
        correctDisappearanceFeatureLength = True
        correctFeatureLength = True
        maxNumObjects = None
        maxNumObjectsAppearance = None
        maxNumObjectsDisappearance = None
        for n in self.nodeIterator():
            try:
                maxNumObjectsApp = len(self._graph.node[n]['appearanceFeatures'])-1
                if maxNumObjectsAppearance is None:
                    maxNumObjectsAppearance = maxNumObjectsApp
                elif maxNumObjectsApp != maxNumObjectsAppearance:
                    correctAppearanceFeatureLength = False
                    getLogger().info('Appearance features have different lengths!')
            except KeyError:
                withAppearanceFeatures = False
                getLogger().info('There are no appearance features in node properties!')
                break

            try:
                maxNumObjectsDis = len(self._graph.node[n]['disappearanceFeatures'])-1
                if maxNumObjectsDisappearance is None:
                    maxNumObjectsDisappearance = maxNumObjectsDis
                elif maxNumObjectsDis != maxNumObjectsDisappearance:
                    correctDisappearanceFeatureLength = False
                    getLogger().info('Disappearance features have different lengths!')
            except KeyError:
                withDisappearanceFeatures = False
                getLogger().info('There are no disappearance features in node properties!')
                break

        if withAppearanceFeatures and withDisappearanceFeatures:
            if correctAppearanceFeatureLength and correctDisappearanceFeatureLength and maxNumObjectsAppearance == maxNumObjectsDisappearance:
                maxNumObjects = maxNumObjectsAppearance
            else:
                correctFeatureLength = False
                getLogger().info('Appearance and disappearance features have different lengths!')
        else:
            withFeatures = False

        if withFeatures and correctFeatureLength:
            for n in self.nodeIterator():
                if not ('appearance' in self._graph.node[n].keys() and self._graph.node[n]['appearance']):
                    allArcsWithValueZero = True
                    in_edges = self._graph.in_edges(n)
                    for edge in list(in_edges):
                        if 'value' in self._graph.edge[edge[0]][edge[1]] and self._graph.edge[edge[0]][edge[1]]['value'] != 0:
                            allArcsWithValueZero = False
                            break

                    self._graph.node[n]['appearanceFeatures'] = listify([0.0] + [0.0] * maxNumObjects)
                    if allArcsWithValueZero:
                        if in_edges:
                            self._graph.remove_edges_from(in_edges)

                if not('disappearance' in self._graph.node[n].keys() and self._graph.node[n]['disappearance']):
                    allArcsWithValueZero = True
                    out_edges = self._graph.out_edges(n)
                    for edge in list(out_edges):
                        if 'value' in self._graph.edge[edge[0]][edge[1]] and self._graph.edge[edge[0]][edge[1]]['value'] != 0:
                            allArcsWithValueZero = False
                            break

                    self._graph.node[n]['disappearanceFeatures'] = listify([0.0] + [0.0] * maxNumObjects)
                    if allArcsWithValueZero:
                        if out_edges:
                            self._graph.remove_edges_from(out_edges)

        return prunedGraph
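
A minimal usage sketch of `pruneGraphToSolution`; the `graph` object and the assumption that a solver has already written `value` attributes onto nodes and edges are hypothetical, not part of the example above:

# Hypothetical setup: `graph` is a HypothesesGraph whose nodes and edges carry a 'value'
# attribute written by a solver (value == 0 means "not used in the solution").
prunedGraph = graph.pruneGraphToSolution(distanceToSolution=0)

# Additionally keep edges between used objects and their unlabeled neighbours
# as negative training examples:
prunedWithContext = graph.pruneGraphToSolution(distanceToSolution=1)

print('{} nodes kept in the pruned graph'.format(len(prunedWithContext._graph.nodes())))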
Code example #2
    def insertEnergies(self, maxNumObjects, detectionProbabilityFunc,
                       transitionProbabilityFunc, boundaryCostMultiplierFunc,
                       divisionProbabilityFunc):
        '''
        Insert energies for detections, divisions and links into the hypotheses graph
        by transforming the probabilities for certain events (given by the `*ProbabilityFunc` functions
        per traxel) into energies. If the given graph contains tracklets (`self.withTracklets is True`),
        the probabilities over all contained traxels are also accumulated for those nodes in the graph.

        The energies are stored in the networkx graph under the following attribute names (to match the format for solvers):
        * detection energies: `self._graph.node[n]['features']`
        * division energies: `self._graph.node[n]['divisionFeatures']`
        * appearance energies: `self._graph.node[n]['appearanceFeatures']`
        * disappearance energies: `self._graph.node[n]['disappearanceFeatures']`
        * transition energies: `self._graph.edge[src][dest]['features']`
        * additionally, the timestep per node (a range when the node contains several traxels) is stored as the `timestep` attribute

        ** Parameters: **

        * `maxNumObjects`: the maximum number of objects per detection
        * `detectionProbabilityFunc`: should take a traxel and return its detection probabilities
         ([prob0objects, prob1object,...])
        * `transitionProbabilityFunc`: should take two traxels and return this link's probabilities
         ([prob0objectsInTransition, prob1objectsInTransition,...])
        * `boundaryCostMultiplierFunc`: should take a traxel and return a scalar multiplier between 0 and 1 for the
         appearance/disappearance cost that depends on the traxel's distance to the spatial and time boundaries
        * `divisionProbabilityFunc`: should take a traxel and return its division probabilities ([probNoDiv, probDiv])
        '''
        numElements = self._graph.number_of_nodes(
        ) + self._graph.number_of_edges()
        progressBar = ProgressBar(stop=numElements)

        # insert detection probabilities for all detections (and some also get a div probability)
        for n in self._graph.nodes_iter():
            if not self.withTracklets:
                # only one traxel, but make it a list so everything below works the same
                traxels = [self._graph.node[n]['traxel']]
            else:
                traxels = self._graph.node[n]['tracklet']

            # accumulate features over all contained traxels
            previousTraxel = None
            detectionFeatures = np.zeros(maxNumObjects + 1)
            for t in traxels:
                detectionFeatures += np.array(
                    negLog(detectionProbabilityFunc(t)))
                if previousTraxel is not None:
                    detectionFeatures += np.array(
                        negLog(transitionProbabilityFunc(previousTraxel, t)))
                previousTraxel = t

            detectionFeatures = listify(list(detectionFeatures))

            # division only if probability is big enough
            divisionFeatures = divisionProbabilityFunc(traxels[-1])
            if divisionFeatures is not None:
                divisionFeatures = listify(negLog(divisionFeatures))

            # appearance/disappearance
            appearanceFeatures = listify(
                [0.0] +
                [boundaryCostMultiplierFunc(traxels[0])] * maxNumObjects)
            disappearanceFeatures = listify(
                [0.0] +
                [boundaryCostMultiplierFunc(traxels[-1])] * maxNumObjects)

            self._graph.node[n]['features'] = detectionFeatures
            if divisionFeatures is not None:
                self._graph.node[n]['divisionFeatures'] = divisionFeatures
            self._graph.node[n]['appearanceFeatures'] = appearanceFeatures
            self._graph.node[n][
                'disappearanceFeatures'] = disappearanceFeatures
            self._graph.node[n]['timestep'] = [
                traxels[0].Timestep, traxels[-1].Timestep
            ]

            progressBar.show()

        # insert transition probabilities for all links
        for a in self._graph.edges_iter():
            if not self.withTracklets:
                srcTraxel = self._graph.node[self.source(a)]['traxel']
                destTraxel = self._graph.node[self.target(a)]['traxel']
            else:
                srcTraxel = self._graph.node[self.source(a)]['tracklet'][
                    -1]  # src is last of the traxels in source tracklet
                destTraxel = self._graph.node[self.target(a)]['tracklet'][
                    0]  # dest is first of traxels in destination tracklet

            features = listify(
                negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))

            self._graph.edge[a[0]][a[1]]['src'] = self._graph.node[a[0]]['id']
            self._graph.edge[a[0]][a[1]]['dest'] = self._graph.node[a[1]]['id']
            self._graph.edge[a[0]][a[1]]['features'] = features

            progressBar.show()
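
To make the probability-to-energy transformation above concrete, here is a hedged sketch of the callbacks `insertEnergies` expects; all probability values are invented and `neg_log` is a stand-in for the module's `negLog` helper:

import numpy as np

def neg_log(probs, eps=1e-10):
    # stand-in for negLog: energy = -log(probability)
    return [-np.log(max(p, eps)) for p in probs]

# Dummy callbacks shaped as described in the docstring (values invented), for maxNumObjects = 2:
def detectionProbabilityFunc(traxel):
    return [0.05, 0.9, 0.05]            # [prob 0 objects, prob 1 object, prob 2 objects]

def transitionProbabilityFunc(srcTraxel, destTraxel):
    return [0.2, 0.7, 0.1]              # [prob 0, 1, 2 objects moving along this link]

def boundaryCostMultiplierFunc(traxel):
    return 1.0                          # full appearance/disappearance cost far from the border

def divisionProbabilityFunc(traxel):
    return [0.95, 0.05]                 # [probNoDiv, probDiv]

# hypothesesGraph.insertEnergies(2, detectionProbabilityFunc, transitionProbabilityFunc,
#                                boundaryCostMultiplierFunc, divisionProbabilityFunc)
print(neg_log([0.9, 0.05]))             # ~[0.105, 2.996]: likely events get low energies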
Code example #3
File: hypothesesgraph.py  Project: chaubold/hytra
    def insertEnergies(self,
                       maxNumObjects,
                       detectionProbabilityFunc,
                       transitionProbabilityFunc,
                       boundaryCostMultiplierFunc,
                       divisionProbabilityFunc,
                       skipLinksBias):
        '''
        Insert energies for detections, divisions and links into the hypotheses graph
        by transforming the probabilities for certain events (given by the `*ProbabilityFunc` functions
        per traxel) into energies. If the given graph contains tracklets (`self.withTracklets is True`),
        the probabilities over all contained traxels are also accumulated for those nodes in the graph.

        The energies are stored in the networkx graph under the following attribute names (to match the format for solvers):
        * detection energies: `self._graph.node[n]['features']`
        * division energies: `self._graph.node[n]['divisionFeatures']`
        * appearance energies: `self._graph.node[n]['appearanceFeatures']`
        * disappearance energies: `self._graph.node[n]['disappearanceFeatures']`
        * transition energies: `self._graph.edge[src][dest]['features']`
        * additionally, the timestep per node (a range when the node contains several traxels) is stored as the `timestep` attribute

        ** Parameters: **

        * `maxNumObjects`: the maximum number of objects per detection
        * `detectionProbabilityFunc`: should take a traxel and return its detection probabilities
         ([prob0objects, prob1object,...])
        * `transitionProbabilityFunc`: should take two traxels and return this link's probabilities
         ([prob0objectsInTransition, prob1objectsInTransition,...])
        * `boundaryCostMultiplierFunc`: should take a traxel and a boolean that is True when an appearance
         cost multiplier is requested (False for disappearance), and return a scalar multiplier between 0 and 1
         for the appearance/disappearance cost that depends on the traxel's distance to the spatial and time boundaries
        * `divisionProbabilityFunc`: should take a traxel and return its division probabilities ([probNoDiv, probDiv])
        * `skipLinksBias`: bias added to the energy of a skip link (a link spanning more than one frame), scaled by the frame gap
        '''
        numElements = self._graph.number_of_nodes() + self._graph.number_of_edges()
        self.progressVisitor.showState("Inserting energies")

        # insert detection probabilities for all detections (and some also get a div probability)
        countElements = 0
        for n in self._graph.nodes_iter():
            countElements += 1
            if not self.withTracklets:
                # only one traxel, but make it a list so everything below works the same
                traxels = [self._graph.node[n]['traxel']]
            else:
                traxels = self._graph.node[n]['tracklet']

            # accumulate features over all contained traxels
            previousTraxel = None
            detectionFeatures = np.zeros(maxNumObjects + 1)
            for t in traxels:
                detectionFeatures += np.array(negLog(detectionProbabilityFunc(t)))
                if previousTraxel is not None:
                    detectionFeatures += np.array(negLog(transitionProbabilityFunc(previousTraxel, t)))
                previousTraxel = t

            detectionFeatures = listify(list(detectionFeatures))

            # division only if probability is big enough
            divisionFeatures = divisionProbabilityFunc(traxels[-1])
            if divisionFeatures is not None:
                divisionFeatures = listify(negLog(divisionFeatures))

            # appearance/disappearance
            appearanceFeatures = listify([0.0] + [boundaryCostMultiplierFunc(traxels[0], True)] * maxNumObjects)
            disappearanceFeatures = listify([0.0] + [boundaryCostMultiplierFunc(traxels[-1], False)] * maxNumObjects)

            self._graph.node[n]['features'] = detectionFeatures
            if divisionFeatures is not None:
                self._graph.node[n]['divisionFeatures'] = divisionFeatures
            self._graph.node[n]['appearanceFeatures'] = appearanceFeatures
            self._graph.node[n]['disappearanceFeatures'] = disappearanceFeatures
            self._graph.node[n]['timestep'] = [traxels[0].Timestep, traxels[-1].Timestep]

            self.progressVisitor.showProgress(countElements/float(numElements))

        # insert transition probabilities for all links
        for a in self._graph.edges_iter():
            countElements += 1
            self.progressVisitor.showProgress(countElements/float(numElements))

            if not self.withTracklets:
                srcTraxel = self._graph.node[self.source(a)]['traxel']
                destTraxel = self._graph.node[self.target(a)]['traxel']
            else:
                srcTraxel = self._graph.node[self.source(a)]['tracklet'][-1]  # src is last of the traxels in source tracklet
                destTraxel = self._graph.node[self.target(a)]['tracklet'][0]  # dest is first of traxels in destination tracklet

            features = listify(negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))

            # bias skip links (edges spanning more than one frame): since we do not want these edges to be preferred, add skipLinksBias per skipped frame to the edge energy
            frame_gap = destTraxel.Timestep - srcTraxel.Timestep

            # 1. method
            if frame_gap > 1:
                features[1][0] = features[1][0] + skipLinksBias*frame_gap

            # # 2. method
            # # introduce new energies like: [[6], [15]] -> [[6, 23], [15, 23]] for first-order links and
            # # [[6], [15]] -> [[23, 6], [23, 15]] for second-order links, and so on for 3rd-order links
            # # !!! this would introduce a new weight in the weight.json file; the weight for the 2nd-order link comes in the 2nd row, and so on.
            # # drawback: we did not manage to tune this parameter to get sensible results.
            # for feat in features:
            #     for i in range(frame_gap):
            #         feat.append(23)
            #     if frame_gap > 1:
            #         feat[frame_gap-1], feat[0] = feat[0], feat[frame_gap-1]


            self._graph.edge[a[0]][a[1]]['src'] = self._graph.node[a[0]]['id']
            self._graph.edge[a[0]][a[1]]['dest'] = self._graph.node[a[1]]['id']
            self._graph.edge[a[0]][a[1]]['features'] = features
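
A small worked sketch of the skip-link bias applied above; after `listify(negLog(...))` the transition features look like `[[energy for 0 objects], [energy for 1 object], ...]`, and the bias is added to the one-object energy once per skipped frame (all numbers below are invented):

# Hedged illustration only; energies, bias and timesteps are made up.
skipLinksBias = 4
features = [[0.4], [1.2]]      # [[energy: 0 objects], [energy: 1 object]] for this link
frame_gap = 6 - 3              # destTraxel.Timestep - srcTraxel.Timestep

if frame_gap > 1:              # same rule as method 1 above
    features[1][0] = features[1][0] + skipLinksBias * frame_gap

print(features)                # [[0.4], [13.2]] -> the skip link is now much more expensive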
Code example #4
    def insertEnergies(self,
                       maxNumObjects,
                       detectionProbabilityFunc,
                       transitionProbabilityFunc,
                       boundaryCostMultiplierFunc,
                       divisionProbabilityFunc,
                       skipLinksBias):
        '''
        Insert energies for detections, divisions and links into the hypotheses graph
        by transforming the probabilities for certain events (given by the `*ProbabilityFunc` functions
        per traxel) into energies. If the given graph contains tracklets (`self.withTracklets is True`),
        the probabilities over all contained traxels are also accumulated for those nodes in the graph.

        The energies are stored in the networkx graph under the following attribute names (to match the format for solvers):
        * detection energies: `self._graph.node[n]['features']`
        * division energies: `self._graph.node[n]['divisionFeatures']`
        * appearance energies: `self._graph.node[n]['appearanceFeatures']`
        * disappearance energies: `self._graph.node[n]['disappearanceFeatures']`
        * transition energies: `self._graph.edge[src][dest]['features']`
        * additionally, the timestep per node (a range when the node contains several traxels) is stored as the `timestep` attribute

        ** Parameters: **

        * `maxNumObjects`: the maximum number of objects per detection
        * `detectionProbabilityFunc`: should take a traxel and return its detection probabilities
         ([prob0objects, prob1object,...])
        * `transitionProbabilityFunc`: should take two traxels and return this link's probabilities
         ([prob0objectsInTransition, prob1objectsInTransition,...])
        * `boundaryCostMultiplierFunc`: should take a traxel and a boolean that is True when an appearance
         cost multiplier is requested (False for disappearance), and return a scalar multiplier between 0 and 1
         for the appearance/disappearance cost that depends on the traxel's distance to the spatial and time boundaries
        * `divisionProbabilityFunc`: should take a traxel and return its division probabilities ([probNoDiv, probDiv])
        * `skipLinksBias`: bias added to the energy of a skip link (a link spanning more than one frame), scaled by the frame gap
        '''
        numElements = self._graph.number_of_nodes() + self._graph.number_of_edges()
        progressBar = ProgressBar(stop=numElements)

        # insert detection probabilities for all detections (and some also get a div probability)
        for n in self._graph.nodes_iter():
            if not self.withTracklets:
                # only one traxel, but make it a list so everything below works the same
                traxels = [self._graph.node[n]['traxel']]
            else:
                traxels = self._graph.node[n]['tracklet']

            # accumulate features over all contained traxels
            previousTraxel = None
            detectionFeatures = np.zeros(maxNumObjects + 1)
            for t in traxels:
                detectionFeatures += np.array(negLog(detectionProbabilityFunc(t)))
                if previousTraxel is not None:
                    detectionFeatures += np.array(negLog(transitionProbabilityFunc(previousTraxel, t)))
                previousTraxel = t

            detectionFeatures = listify(list(detectionFeatures))

            # division only if probability is big enough
            divisionFeatures = divisionProbabilityFunc(traxels[-1])
            if divisionFeatures is not None:
                divisionFeatures = listify(negLog(divisionFeatures))

            # appearance/disappearance
            appearanceFeatures = listify([0.0] + [boundaryCostMultiplierFunc(traxels[0], True)] * maxNumObjects)
            disappearanceFeatures = listify([0.0] + [boundaryCostMultiplierFunc(traxels[-1], False)] * maxNumObjects)

            self._graph.node[n]['features'] = detectionFeatures
            if divisionFeatures is not None:
                self._graph.node[n]['divisionFeatures'] = divisionFeatures
            self._graph.node[n]['appearanceFeatures'] = appearanceFeatures
            self._graph.node[n]['disappearanceFeatures'] = disappearanceFeatures
            self._graph.node[n]['timestep'] = [traxels[0].Timestep, traxels[-1].Timestep]

            progressBar.show()

        # insert transition probabilities for all links
        for a in self._graph.edges_iter():
            if not self.withTracklets:
                srcTraxel = self._graph.node[self.source(a)]['traxel']
                destTraxel = self._graph.node[self.target(a)]['traxel']
            else:
                srcTraxel = self._graph.node[self.source(a)]['tracklet'][-1]  # src is last of the traxels in source tracklet
                destTraxel = self._graph.node[self.target(a)]['tracklet'][0]  # dest is first of traxels in destination tracklet

            features = listify(negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))

            # bias skip links (edges spanning more than one frame): since we do not want these edges to be preferred, add skipLinksBias per skipped frame to the edge energy
            frame_gap = destTraxel.Timestep - srcTraxel.Timestep

            # 1. method
            if frame_gap > 1:
                features[1][0] = features[1][0] + skipLinksBias*frame_gap

            # # 2. method
            # # introduce new energies like: [[6], [15]] -> [[6, 23], [15, 23]] for first-order links and
            # # [[6], [15]] -> [[23, 6], [23, 15]] for second-order links, and so on for 3rd-order links
            # # !!! this would introduce a new weight in the weight.json file; the weight for the 2nd-order link comes in the 2nd row, and so on.
            # # drawback: we did not manage to tune this parameter to get sensible results.
            # for feat in features:
            #     for i in range(frame_gap):
            #         feat.append(23)
            #     if frame_gap > 1:
            #         feat[frame_gap-1], feat[0] = feat[0], feat[frame_gap-1]


            self._graph.edge[a[0]][a[1]]['src'] = self._graph.node[a[0]]['id']
            self._graph.edge[a[0]][a[1]]['dest'] = self._graph.node[a[1]]['id']
            self._graph.edge[a[0]][a[1]]['features'] = features

            progressBar.show()
Code example #5
    def _minCostMaxFlowMergerResolving(self,
                                       objectFeatures,
                                       transitionClassifier=None,
                                       transitionParameter=5.0):
        """
        Find the optimal assignments within the `resolvedGraph` by running min-cost max-flow from the
        `dpct` module.

        Converts the `resolvedGraph` to our JSON model structure, predicts the transition probabilities
        either using the given transitionClassifier, or using distance-based probabilities.

        **returns** a `nodeFlowMap` and `arcFlowMap` holding information on the usage of the respective nodes and links

        **Note:** cannot use `networkx` flow methods because they don't work with floating point weights.
        """

        trackingGraph = JsonTrackingGraph(progressVisitor=self.progressVisitor)
        for node in self.resolvedGraph.nodes_iter():
            additionalFeatures = {}
            additionalFeatures['nid'] = node

            # nodes with no in/out
            numStates = 2

            if len(self.resolvedGraph.in_edges(node)) == 0:
                # division nodes with no incoming arcs offer 2 units of flow without the need to de-merge
                if node in self.unresolvedGraph.nodes(
                ) and self.unresolvedGraph.node[node]['division'] and len(
                        self.unresolvedGraph.out_edges(node)) == 2:
                    numStates = 3
                additionalFeatures['appearanceFeatures'] = [
                    [i**2 * 0.01] for i in range(numStates)
                ]
            if len(self.resolvedGraph.out_edges(node)) == 0:
                assert (
                    numStates == 2
                )  # division nodes with no incoming should have outgoing, or they shouldn't show up in resolved graph
                additionalFeatures['disappearanceFeatures'] = [
                    [i**2 * 0.01] for i in range(numStates)
                ]

            features = [[i**2] for i in range(numStates)]
            uuid = trackingGraph.addDetectionHypotheses(
                features, **additionalFeatures)
            self.resolvedGraph.node[node]['id'] = uuid

        for edge in self.resolvedGraph.edges_iter():
            src = self.resolvedGraph.node[edge[0]]['id']
            dest = self.resolvedGraph.node[edge[1]]['id']

            featuresAtSrc = objectFeatures[edge[0]]
            featuresAtDest = objectFeatures[edge[1]]

            if transitionClassifier is not None:
                try:
                    featVec = self.pluginManager.applyTransitionFeatureVectorConstructionPlugins(
                        featuresAtSrc, featuresAtDest,
                        transitionClassifier.selectedFeatures)
                except Exception:
                    getLogger().error(
                        "Could not compute transition features of link {}->{}:"
                        .format(src, dest))
                    getLogger().error(featuresAtSrc)
                    getLogger().error(featuresAtDest)
                    raise
                featVec = np.expand_dims(np.array(featVec), axis=0)
                probs = transitionClassifier.predictProbabilities(featVec)[0]
            else:
                dist = np.linalg.norm(featuresAtDest['RegionCenter'] -
                                      featuresAtSrc['RegionCenter'])
                prob = np.exp(-dist / transitionParameter)
                probs = [1.0 - prob, prob]

            trackingGraph.addLinkingHypotheses(src, dest,
                                               listify(negLog(probs)))

        # Set TraxelToUniqueId on resolvedGraph's json graph
        uuidToTraxelMap = {}
        traxelIdPerTimestepToUniqueIdMap = {}

        for node in self.resolvedGraph.nodes_iter():
            uuid = self.resolvedGraph.node[node]['id']
            uuidToTraxelMap[uuid] = [node]

            for t in uuidToTraxelMap[uuid]:
                traxelIdPerTimestepToUniqueIdMap.setdefault(str(t[0]), {})[str(
                    t[1])] = uuid

        trackingGraph.setTraxelToUniqueId(traxelIdPerTimestepToUniqueIdMap)

        # track
        import dpct

        weights = {"weights": [1, 1, 1, 1]}

        if not self.numSplits:
            mergerResult = dpct.trackMaxFlow(trackingGraph.model, weights)
        else:
            getLogger().info("Running split tracking with {} splits.".format(
                self.numSplits))
            mergerResult = SplitTracking.trackFlowBasedWithSplits(
                trackingGraph.model,
                weights,
                numSplits=self.numSplits,
                withMergerResolver=True)

        # transform results to dictionaries that can be indexed by id or (src,dest)
        nodeFlowMap = dict([(int(d['id']), int(d['value']))
                            for d in mergerResult['detectionResults']])
        arcFlowMap = dict([((int(l['src']), int(l['dest'])), int(l['value']))
                           for l in mergerResult['linkingResults']])

        return nodeFlowMap, arcFlowMap
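
A hedged sketch of how the returned flow maps might be consumed; `resolver` and `objectFeatures` are assumed to exist and all ids/values are illustrative:

# nodeFlowMap: detection uuid -> number of objects assigned to that node
# arcFlowMap: (src uuid, dest uuid) -> flow on that link
nodeFlowMap, arcFlowMap = resolver._minCostMaxFlowMergerResolving(objectFeatures)

usedNodes = [nodeId for nodeId, value in nodeFlowMap.items() if value > 0]
usedLinks = [(src, dest) for (src, dest), value in arcFlowMap.items() if value > 0]

print('{} of {} detections carry flow'.format(len(usedNodes), len(nodeFlowMap)))
print('{} of {} links are active'.format(len(usedLinks), len(arcFlowMap)))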
Code example #6
    def _minCostMaxFlowMergerResolving(self, objectFeatures, transitionClassifier=None, transitionParameter=5.0):
        """
        Find the optimal assignments within the `resolvedGraph` by running min-cost max-flow from the
        `dpct` module.

        Converts the `resolvedGraph` to our JSON model structure, predicts the transition probabilities
        either using the given transitionClassifier, or using distance-based probabilities.

        **returns** a `nodeFlowMap` and `arcFlowMap` holding information on the usage of the respective nodes and links

        **Note:** cannot use `networkx` flow methods because they don't work with floating point weights.
        """

        trackingGraph = JsonTrackingGraph()
        for node in self.resolvedGraph.nodes_iter():
            additionalFeatures = {}

            # nodes with no in/out
            numStates = 2
            if len(self.resolvedGraph.in_edges(node)) == 0:
                # division nodes with no incoming arcs offer 2 units of flow without the need to de-merge
                if node in self.unresolvedGraph.nodes() and self.unresolvedGraph.node[node]['division'] and len(self.unresolvedGraph.out_edges(node)) == 2:
                    numStates = 3
                additionalFeatures['appearanceFeatures'] = [[i**2 * 0.01] for i in range(numStates)]
            if len(self.resolvedGraph.out_edges(node)) == 0:
                assert(numStates == 2) # division nodes with no incoming should have outgoing, or they shouldn't show up in resolved graph
                additionalFeatures['disappearanceFeatures'] = [[i**2 * 0.01] for i in range(numStates)]

            features = [[i**2] for i in range(numStates)]
            uuid = trackingGraph.addDetectionHypotheses(features, **additionalFeatures)
            self.resolvedGraph.node[node]['id'] = uuid

        for edge in self.resolvedGraph.edges_iter():
            src = self.resolvedGraph.node[edge[0]]['id']
            dest = self.resolvedGraph.node[edge[1]]['id']

            featuresAtSrc = objectFeatures[edge[0]]
            featuresAtDest = objectFeatures[edge[1]]

            if transitionClassifier is not None:
                try:
                    featVec = self.pluginManager.applyTransitionFeatureVectorConstructionPlugins(
                        featuresAtSrc, featuresAtDest, transitionClassifier.selectedFeatures)
                except Exception:
                    getLogger().error("Could not compute transition features of link {}->{}:".format(src, dest))
                    getLogger().error(featuresAtSrc)
                    getLogger().error(featuresAtDest)
                    raise
                featVec = np.expand_dims(np.array(featVec), axis=0)
                probs = transitionClassifier.predictProbabilities(featVec)[0]
            else:
                dist = np.linalg.norm(featuresAtDest['RegionCenter'] - featuresAtSrc['RegionCenter'])
                prob = np.exp(-dist / transitionParameter)
                probs = [1.0 - prob, prob]

            trackingGraph.addLinkingHypotheses(src, dest, listify(negLog(probs)))

        # track
        import dpct
        weights = {"weights": [1, 1, 1, 1]}
        mergerResult = dpct.trackMaxFlow(trackingGraph.model, weights)

        # transform results to dictionaries that can be indexed by id or (src,dest)
        nodeFlowMap = dict([(int(d['id']), int(d['value'])) for d in mergerResult['detectionResults']])
        arcFlowMap = dict([((int(l['src']), int(l['dest'])), int(l['value'])) for l in mergerResult['linkingResults']])

        return nodeFlowMap, arcFlowMap
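
When no `transitionClassifier` is given, the method above falls back to a distance-based probability `exp(-dist / transitionParameter)`. A tiny stand-alone sketch of that fallback, with invented region centers:

import numpy as np

def distance_based_link_probabilities(srcCenter, destCenter, transitionParameter=5.0):
    # Same fallback as above: closer objects get a higher linking probability.
    dist = np.linalg.norm(np.asarray(destCenter) - np.asarray(srcCenter))
    prob = np.exp(-dist / transitionParameter)
    return [1.0 - prob, prob]          # [prob of not linking, prob of linking]

print(distance_based_link_probabilities([10.0, 10.0], [13.0, 14.0]))  # dist = 5 -> [~0.63, ~0.37]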
Code example #7
File: mergerresolver.py  Project: stuarteberg/hytra
    def _minCostMaxFlowMergerResolving(self,
                                       objectFeatures,
                                       transitionClassifier=None,
                                       transitionParameter=5.0):
        """
        Find the optimal assignments within the `resolvedGraph` by running min-cost max-flow from the
        `dpct` module.

        Converts the `resolvedGraph` to our JSON model structure, predicts the transition probabilities
        either using the given transitionClassifier, or using distance-based probabilities.

        **returns** a `nodeFlowMap` and `arcFlowMap` holding information on the usage of the respective nodes and links

        **Note:** cannot use `networkx` flow methods because they don't work with floating point weights.
        """

        trackingGraph = JsonTrackingGraph()
        for node in self.resolvedGraph.nodes_iter():
            additionalFeatures = {}
            if len(self.resolvedGraph.in_edges(node)) == 0:
                additionalFeatures['appearanceFeatures'] = [[0], [0]]
            if len(self.resolvedGraph.out_edges(node)) == 0:
                additionalFeatures['disappearanceFeatures'] = [[0], [0]]
            uuid = trackingGraph.addDetectionHypotheses([[0], [1]],
                                                        **additionalFeatures)
            self.resolvedGraph.node[node]['id'] = uuid

        for edge in self.resolvedGraph.edges_iter():
            src = self.resolvedGraph.node[edge[0]]['id']
            dest = self.resolvedGraph.node[edge[1]]['id']

            featuresAtSrc = objectFeatures[edge[0]]
            featuresAtDest = objectFeatures[edge[1]]

            if transitionClassifier is not None:
                try:
                    featVec = self.pluginManager.applyTransitionFeatureVectorConstructionPlugins(
                        featuresAtSrc, featuresAtDest,
                        transitionClassifier.selectedFeatures)
                except Exception:
                    getLogger().error(
                        "Could not compute transition features of link {}->{}:"
                        .format(src, dest))
                    getLogger().error(featuresAtSrc)
                    getLogger().error(featuresAtDest)
                    raise
                featVec = np.expand_dims(np.array(featVec), axis=0)
                probs = transitionClassifier.predictProbabilities(featVec)[0]
            else:
                dist = np.linalg.norm(featuresAtDest['RegionCenter'] -
                                      featuresAtSrc['RegionCenter'])
                prob = np.exp(-dist / transitionParameter)
                probs = [1.0 - prob, prob]

            trackingGraph.addLinkingHypotheses(src, dest,
                                               listify(negLog(probs)))

        # track
        import dpct
        weights = {"weights": [1, 1, 1, 1]}
        mergerResult = dpct.trackMaxFlow(trackingGraph.model, weights)

        # transform results to dictionaries that can be indexed by id or (src,dest)
        nodeFlowMap = dict([(int(d['id']), int(d['value']))
                            for d in mergerResult['detectionResults']])
        arcFlowMap = dict([((int(l['src']), int(l['dest'])), int(l['value']))
                           for l in mergerResult['linkingResults']])

        return nodeFlowMap, arcFlowMap