Example #1
    def insertEnergies(self, maxNumObjects, detectionProbabilityFunc,
                       transitionProbabilityFunc, boundaryCostMultiplierFunc,
                       divisionProbabilityFunc):
        '''
        Insert energies for detections, divisions and links into the hypotheses graph
        by transforming the probabilities for certain events (given by the `*ProbabilityFunc` functions
        per traxel) into energies. If the given graph contains tracklets (`self.withTracklets is True`),
        the probabilities are accumulated over all traxels contained in each node.

        The energies are stored in the networkx graph under the following attribute names (to match the format for solvers):
        * detection energies: `self._graph.node[n]['features']`
        * division energies: `self._graph.node[n]['divisionFeatures']`
        * appearance energies: `self._graph.node[n]['appearanceFeatures']`
        * disappearance energies: `self._graph.node[n]['disappearanceFeatures']`
        * transition energies: `self._graph.edge[src][dest]['features']`
        * additionally, the timestep (a range for tracklets) is stored per node as the `timestep` attribute

        **Parameters:**

        * `maxNumObjects`: the maximum number of objects per detection
        * `detectionProbabilityFunc`: should take a traxel and return its detection probabilities
         ([prob0objects, prob1object,...])
        * `transitionProbabilityFunc`: should take two traxels and return this link's probabilities
         ([prob0objectsInTransition, prob1objectsInTransition,...])
        * `boundaryCostMultiplierFunc`: should take a traxel and return a scalar multiplier between 0 and 1 for the
         appearance/disappearance cost, depending on the traxel's distance to the spatial and temporal boundary
        * `divisionProbabilityFunc`: should take a traxel and return its division probabilities ([probNoDiv, probDiv])
        '''
        numElements = self._graph.number_of_nodes() + self._graph.number_of_edges()
        progressBar = ProgressBar(stop=numElements)

        # insert detection probabilities for all detections (and some also get a div probability)
        for n in self._graph.nodes_iter():
            if not self.withTracklets:
                # only one traxel, but make it a list so everything below works the same
                traxels = [self._graph.node[n]['traxel']]
            else:
                traxels = self._graph.node[n]['tracklet']

            # accumulate features over all contained traxels
            previousTraxel = None
            detectionFeatures = np.zeros(maxNumObjects + 1)
            for t in traxels:
                detectionFeatures += np.array(
                    negLog(detectionProbabilityFunc(t)))
                if previousTraxel is not None:
                    detectionFeatures += np.array(
                        negLog(transitionProbabilityFunc(previousTraxel, t)))
                previousTraxel = t

            detectionFeatures = listify(list(detectionFeatures))

            # a division hypothesis is only added if the division probability function provides probabilities
            # (it may return None, e.g. when the division probability is too low)
            divisionFeatures = divisionProbabilityFunc(traxels[-1])
            if divisionFeatures is not None:
                divisionFeatures = listify(negLog(divisionFeatures))

            # appearance/disappearance
            appearanceFeatures = listify(
                [0.0] +
                [boundaryCostMultiplierFunc(traxels[0])] * maxNumObjects)
            disappearanceFeatures = listify(
                [0.0] +
                [boundaryCostMultiplierFunc(traxels[-1])] * maxNumObjects)

            self._graph.node[n]['features'] = detectionFeatures
            if divisionFeatures is not None:
                self._graph.node[n]['divisionFeatures'] = divisionFeatures
            self._graph.node[n]['appearanceFeatures'] = appearanceFeatures
            self._graph.node[n][
                'disappearanceFeatures'] = disappearanceFeatures
            self._graph.node[n]['timestep'] = [
                traxels[0].Timestep, traxels[-1].Timestep
            ]

            progressBar.show()

        # insert transition probabilities for all links
        for a in self._graph.edges_iter():
            if not self.withTracklets:
                srcTraxel = self._graph.node[self.source(a)]['traxel']
                destTraxel = self._graph.node[self.target(a)]['traxel']
            else:
                srcTraxel = self._graph.node[self.source(a)]['tracklet'][
                    -1]  # src is last of the traxels in source tracklet
                destTraxel = self._graph.node[self.target(a)]['tracklet'][
                    0]  # dest is first of traxels in destination tracklet

            features = listify(
                negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))

            self._graph.edge[a[0]][a[1]]['src'] = self._graph.node[a[0]]['id']
            self._graph.edge[a[0]][a[1]]['dest'] = self._graph.node[a[1]]['id']
            self._graph.edge[a[0]][a[1]]['features'] = features

            progressBar.show()
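
The example above relies on two small helpers, `negLog` and `listify`, which are not shown on this page, and on the caller supplying the four probability callbacks described in the docstring. The following sketch shows plausible stand-ins for the helpers and toy callbacks matching the documented signatures; it is illustrative only, not the library's actual code.

import numpy as np

# Assumed helper implementations (not part of the example above):
# negLog turns a list of probabilities into energies, listify wraps each
# scalar into its own list, which is the per-state feature format expected by the solvers.
def negLog(probabilities):
    # clip to avoid -log(0) for degenerate probabilities
    return list(-np.log(np.clip(np.asarray(probabilities, dtype=float), 1e-10, 1.0)))

def listify(values):
    return [[v] for v in values]

# Toy callbacks matching the documented signatures (purely illustrative):
def detectionProbabilityFunc(traxel):
    return [0.1, 0.8, 0.1]        # [prob0objects, prob1object, prob2objects]

def transitionProbabilityFunc(srcTraxel, destTraxel):
    return [0.2, 0.7, 0.1]        # [prob0objectsInTransition, prob1objectInTransition, ...]

def boundaryCostMultiplierFunc(traxel):
    return 0.5                    # scalar in [0, 1]

def divisionProbabilityFunc(traxel):
    return [0.9, 0.1]             # [probNoDiv, probDiv], or None if the node cannot divide

# hypothesesGraph.insertEnergies(2, detectionProbabilityFunc, transitionProbabilityFunc,
#                                boundaryCostMultiplierFunc, divisionProbabilityFunc)
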
Example #2
    def insertEnergies(self,
                       maxNumObjects,
                       detectionProbabilityFunc,
                       transitionProbabilityFunc,
                       boundaryCostMultiplierFunc,
                       divisionProbabilityFunc,
                       skipLinksBias):
        '''
        Insert energies for detections, divisions and links into the hypotheses graph
        by transforming the probabilities for certain events (given by the `*ProbabilityFunc` functions
        per traxel) into energies. If the given graph contains tracklets (`self.withTracklets is True`),
        the probabilities are accumulated over all traxels contained in each node.

        The energies are stored in the networkx graph under the following attribute names (to match the format for solvers):
        * detection energies: `self._graph.node[n]['features']`
        * division energies: `self._graph.node[n]['divisionFeatures']`
        * appearance energies: `self._graph.node[n]['appearanceFeatures']`
        * disappearance energies: `self._graph.node[n]['disappearanceFeatures']`
        * transition energies: `self._graph.edge[src][dest]['features']`
        * additionally, the timestep (a range for tracklets) is stored per node as the `timestep` attribute

        **Parameters:**

        * `maxNumObjects`: the maximum number of objects per detection
        * `detectionProbabilityFunc`: should take a traxel and return its detection probabilities
         ([prob0objects, prob1object,...])
        * `transitionProbabilityFunc`: should take two traxels and return this link's probabilities
         ([prob0objectsInTransition, prob1objectsInTransition,...])
        * `boundaryCostMultiplierFunc`: should take a traxel and a boolean (True when an appearance cost multiplier is
         requested, False for disappearance) and return a scalar multiplier between 0 and 1 for the
         appearance/disappearance cost, depending on the traxel's distance to the spatial and temporal boundary
        * `divisionProbabilityFunc`: should take a traxel and return its division probabilities ([probNoDiv, probDiv])
        '''
        numElements = self._graph.number_of_nodes() + self._graph.number_of_edges()
        self.progressVisitor.showState("Inserting energies")

        # insert detection probabilities for all detections (and some also get a div probability)
        countElements = 0
        for n in self._graph.nodes_iter():
            countElements += 1
            if not self.withTracklets:
                # only one traxel, but make it a list so everything below works the same
                traxels = [self._graph.node[n]['traxel']]
            else:
                traxels = self._graph.node[n]['tracklet']

            # accumulate features over all contained traxels
            previousTraxel = None
            detectionFeatures = np.zeros(maxNumObjects + 1)
            for t in traxels:
                detectionFeatures += np.array(negLog(detectionProbabilityFunc(t)))
                if previousTraxel is not None:
                    detectionFeatures += np.array(negLog(transitionProbabilityFunc(previousTraxel, t)))
                previousTraxel = t

            detectionFeatures = listify(list(detectionFeatures))

            # a division hypothesis is only added if the division probability function provides probabilities
            # (it may return None, e.g. when the division probability is too low)
            divisionFeatures = divisionProbabilityFunc(traxels[-1])
            if divisionFeatures is not None:
                divisionFeatures = listify(negLog(divisionFeatures))

            # appearance/disappearance
            appearanceFeatures = listify([0.0] + [boundaryCostMultiplierFunc(traxels[0], True)] * maxNumObjects)
            disappearanceFeatures = listify([0.0] + [boundaryCostMultiplierFunc(traxels[-1], False)] * maxNumObjects)

            self._graph.node[n]['features'] = detectionFeatures
            if divisionFeatures is not None:
                self._graph.node[n]['divisionFeatures'] = divisionFeatures
            self._graph.node[n]['appearanceFeatures'] = appearanceFeatures
            self._graph.node[n]['disappearanceFeatures'] = disappearanceFeatures
            self._graph.node[n]['timestep'] = [traxels[0].Timestep, traxels[-1].Timestep]

            self.progressVisitor.showProgress(countElements/float(numElements))

        # insert transition probabilities for all links
        for a in self._graph.edges_iter():
            countElements += 1
            self.progressVisitor.showProgress(countElements/float(numElements))

            if not self.withTracklets:
                srcTraxel = self._graph.node[self.source(a)]['traxel']
                destTraxel = self._graph.node[self.target(a)]['traxel']
            else:
                srcTraxel = self._graph.node[self.source(a)]['tracklet'][-1]  # src is last of the traxels in source tracklet
                destTraxel = self._graph.node[self.target(a)]['tracklet'][0]  # dest is first of traxels in destination tracklet

            features = listify(negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))

            # bias links that skip frames: since we do not want these edges to be preferred over
            # direct links, add skipLinksBias * frame_gap to the edge's transition energy
            frame_gap = destTraxel.Timestep - srcTraxel.Timestep

            # 1. method
            if frame_gap > 1:
                features[1][0] = features[1][0] + skipLinksBias*frame_gap

            # # 2. method
            # # introduce new energies like: [[6], [15]] -> [[6, 23], [15, 23]] for first-order links and
            # # [[6], [15]] -> [[23, 6], [23, 15]] for second-order links, and so on for 3rd-order links
            # # !!! this introduces a new weight in the weight.json file; for the 2nd link it comes in the 2nd row, and so on.
            # # drawback: we did not manage to tune the parameter to get sensible results.
            # for feat in features:
            #     for i in range(frame_gap):
            #         feat.append(23)
            #     if frame_gap > 1:
            #         feat[frame_gap-1], feat[0] = feat[0], feat[frame_gap-1]


            self._graph.edge[a[0]][a[1]]['src'] = self._graph.node[a[0]]['id']
            self._graph.edge[a[0]][a[1]]['dest'] = self._graph.node[a[1]]['id']
            self._graph.edge[a[0]][a[1]]['features'] = features
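
To see what the "1. method" above does: for a link that skips frames, `skipLinksBias * frame_gap` is added to the energy of the "one object in transition" state, so that direct links stay cheaper. A tiny worked illustration with made-up numbers:

# made-up transition energies after negLog + listify: [energy(0 objects), energy(1 object)]
features = [[0.1], [2.3]]
skipLinksBias = 20
frame_gap = 3          # this link skips two frames

if frame_gap > 1:
    features[1][0] = features[1][0] + skipLinksBias * frame_gap

print(features)        # [[0.1], [62.3]] -> taking the skip link is now much more expensive
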
Example #3
    def insertEnergies(self,
                       maxNumObjects,
                       detectionProbabilityFunc,
                       transitionProbabilityFunc,
                       boundaryCostMultiplierFunc,
                       divisionProbabilityFunc,
                       skipLinksBias):
        '''
        Insert energies for detections, divisions and links into the hypotheses graph
        by transforming the probabilities for certain events (given by the `*ProbabilityFunc` functions
        per traxel) into energies. If the given graph contains tracklets (`self.withTracklets is True`),
        the probabilities are accumulated over all traxels contained in each node.

        The energies are stored in the networkx graph under the following attribute names (to match the format for solvers):
        * detection energies: `self._graph.node[n]['features']`
        * division energies: `self._graph.node[n]['divisionFeatures']`
        * appearance energies: `self._graph.node[n]['appearanceFeatures']`
        * disappearance energies: `self._graph.node[n]['disappearanceFeatures']`
        * transition energies: `self._graph.edge[src][dest]['features']`
        * additionally, the timestep (a range for tracklets) is stored per node as the `timestep` attribute

        **Parameters:**

        * `maxNumObjects`: the maximum number of objects per detection
        * `detectionProbabilityFunc`: should take a traxel and return its detection probabilities
         ([prob0objects, prob1object,...])
        * `transitionProbabilityFunc`: should take two traxels and return this link's probabilities
         ([prob0objectsInTransition, prob1objectsInTransition,...])
        * `boundaryCostMultiplierFunc`: should take a traxel and a boolean (True when an appearance cost multiplier is
         requested, False for disappearance) and return a scalar multiplier between 0 and 1 for the
         appearance/disappearance cost, depending on the traxel's distance to the spatial and temporal boundary
        * `divisionProbabilityFunc`: should take a traxel and return its division probabilities ([probNoDiv, probDiv])
        '''
        numElements = self._graph.number_of_nodes() + self._graph.number_of_edges()
        progressBar = ProgressBar(stop=numElements)

        # insert detection probabilities for all detections (and some also get a div probability)
        for n in self._graph.nodes_iter():
            if not self.withTracklets:
                # only one traxel, but make it a list so everything below works the same
                traxels = [self._graph.node[n]['traxel']]
            else:
                traxels = self._graph.node[n]['tracklet']

            # accumulate features over all contained traxels
            previousTraxel = None
            detectionFeatures = np.zeros(maxNumObjects + 1)
            for t in traxels:
                detectionFeatures += np.array(negLog(detectionProbabilityFunc(t)))
                if previousTraxel is not None:
                    detectionFeatures += np.array(negLog(transitionProbabilityFunc(previousTraxel, t)))
                previousTraxel = t

            detectionFeatures = listify(list(detectionFeatures))

            # a division hypothesis is only added if the division probability function provides probabilities
            # (it may return None, e.g. when the division probability is too low)
            divisionFeatures = divisionProbabilityFunc(traxels[-1])
            if divisionFeatures is not None:
                divisionFeatures = listify(negLog(divisionFeatures))

            # appearance/disappearance
            appearanceFeatures = listify([0.0] + [boundaryCostMultiplierFunc(traxels[0], True)] * maxNumObjects)
            disappearanceFeatures = listify([0.0] + [boundaryCostMultiplierFunc(traxels[-1], False)] * maxNumObjects)

            self._graph.node[n]['features'] = detectionFeatures
            if divisionFeatures is not None:
                self._graph.node[n]['divisionFeatures'] = divisionFeatures
            self._graph.node[n]['appearanceFeatures'] = appearanceFeatures
            self._graph.node[n]['disappearanceFeatures'] = disappearanceFeatures
            self._graph.node[n]['timestep'] = [traxels[0].Timestep, traxels[-1].Timestep]

            progressBar.show()

        # insert transition probabilities for all links
        for a in self._graph.edges_iter():
            if not self.withTracklets:
                srcTraxel = self._graph.node[self.source(a)]['traxel']
                destTraxel = self._graph.node[self.target(a)]['traxel']
            else:
                srcTraxel = self._graph.node[self.source(a)]['tracklet'][-1]  # src is last of the traxels in source tracklet
                destTraxel = self._graph.node[self.target(a)]['tracklet'][0]  # dest is first of traxels in destination tracklet

            features = listify(negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))

            # bias links that skip frames: since we do not want these edges to be preferred over
            # direct links, add skipLinksBias * frame_gap to the edge's transition energy
            frame_gap = destTraxel.Timestep - srcTraxel.Timestep

            # 1. method
            if frame_gap > 1:
                features[1][0] = features[1][0] + skipLinksBias*frame_gap

            # # 2. method
            # # introduce new energies like: [[6], [15]] -> [[6, 23], [15, 23]] for first-order links and
            # # [[6], [15]] -> [[23, 6], [23, 15]] for second-order links, and so on for 3rd-order links
            # # !!! this introduces a new weight in the weight.json file; for the 2nd link it comes in the 2nd row, and so on.
            # # drawback: we did not manage to tune the parameter to get sensible results.
            # for feat in features:
            #     for i in range(frame_gap):
            #         feat.append(23)
            #     if frame_gap > 1:
            #         feat[frame_gap-1], feat[0] = feat[0], feat[frame_gap-1]


            self._graph.edge[a[0]][a[1]]['src'] = self._graph.node[a[0]]['id']
            self._graph.edge[a[0]][a[1]]['dest'] = self._graph.node[a[1]]['id']
            self._graph.edge[a[0]][a[1]]['features'] = features

            progressBar.show()
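
After `insertEnergies` has run, every node carries the attributes listed in the docstring. As an orientation, a tracklet node spanning timesteps 3 to 5 with `maxNumObjects=2` might look roughly as follows; the numbers are invented, and the `id` attribute is assigned elsewhere in the pipeline (it is what the edges' `src`/`dest` entries refer to).

# Illustrative node attribute dictionary after insertEnergies (invented numbers):
exampleNodeAttributes = {
    'id': 17,                                     # unique id, referenced by the edges' 'src'/'dest'
    'features': [[4.6], [0.2], [2.3]],            # detection energies for 0, 1, 2 objects
    'divisionFeatures': [[0.1], [2.3]],           # only present for nodes that may divide
    'appearanceFeatures': [[0.0], [0.5], [0.5]],  # 0.0 for zero objects, boundary multiplier otherwise
    'disappearanceFeatures': [[0.0], [0.5], [0.5]],
    'timestep': [3, 5],                           # first and last timestep covered by the node
    # plus 'traxel' (or 'tracklet' when self.withTracklets is True)
}
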
Example #4
    def _minCostMaxFlowMergerResolving(self,
                                       objectFeatures,
                                       transitionClassifier=None,
                                       transitionParameter=5.0):
        """
        Find the optimal assignments within the `resolvedGraph` by running min-cost max-flow from the
        `dpct` module.

        Converts the `resolvedGraph` to our JSON model structure, predicts the transition probabilities
        either using the given transitionClassifier, or using distance-based probabilities.

        **returns** a `nodeFlowMap` and `arcFlowMap` holding information on the usage of the respective nodes and links

        **Note:** cannot use `networkx` flow methods because they don't work with floating point weights.
        """

        trackingGraph = JsonTrackingGraph(progressVisitor=self.progressVisitor)
        for node in self.resolvedGraph.nodes_iter():
            additionalFeatures = {}
            additionalFeatures['nid'] = node

            # by default each node supports states 0 and 1 (at most one unit of flow);
            # nodes without incoming/outgoing arcs additionally get appearance/disappearance features below
            numStates = 2

            if len(self.resolvedGraph.in_edges(node)) == 0:
                # division nodes with no incoming arcs offer 2 units of flow without the need to de-merge
                if node in self.unresolvedGraph.nodes(
                ) and self.unresolvedGraph.node[node]['division'] and len(
                        self.unresolvedGraph.out_edges(node)) == 2:
                    numStates = 3
                additionalFeatures['appearanceFeatures'] = [
                    [i**2 * 0.01] for i in range(numStates)
                ]
            if len(self.resolvedGraph.out_edges(node)) == 0:
                assert (
                    numStates == 2
                )  # division nodes without incoming arcs must have outgoing arcs, otherwise they would not appear in the resolved graph
                additionalFeatures['disappearanceFeatures'] = [
                    [i**2 * 0.01] for i in range(numStates)
                ]

            features = [[i**2] for i in range(numStates)]
            uuid = trackingGraph.addDetectionHypotheses(
                features, **additionalFeatures)
            self.resolvedGraph.node[node]['id'] = uuid

        for edge in self.resolvedGraph.edges_iter():
            src = self.resolvedGraph.node[edge[0]]['id']
            dest = self.resolvedGraph.node[edge[1]]['id']

            featuresAtSrc = objectFeatures[edge[0]]
            featuresAtDest = objectFeatures[edge[1]]

            if transitionClassifier is not None:
                try:
                    featVec = self.pluginManager.applyTransitionFeatureVectorConstructionPlugins(
                        featuresAtSrc, featuresAtDest,
                        transitionClassifier.selectedFeatures)
                except:
                    getLogger().error(
                        "Could not compute transition features of link {}->{}:"
                        .format(src, dest))
                    getLogger().error(featuresAtSrc)
                    getLogger().error(featuresAtDest)
                    raise
                featVec = np.expand_dims(np.array(featVec), axis=0)
                probs = transitionClassifier.predictProbabilities(featVec)[0]
            else:
                dist = np.linalg.norm(featuresAtDest['RegionCenter'] -
                                      featuresAtSrc['RegionCenter'])
                prob = np.exp(-dist / transitionParameter)
                probs = [1.0 - prob, prob]

            trackingGraph.addLinkingHypotheses(src, dest,
                                               listify(negLog(probs)))

        # Set TraxelToUniqueId on resolvedGraph's json graph
        uuidToTraxelMap = {}
        traxelIdPerTimestepToUniqueIdMap = {}

        for node in self.resolvedGraph.nodes_iter():
            uuid = self.resolvedGraph.node[node]['id']
            uuidToTraxelMap[uuid] = [node]

            for t in uuidToTraxelMap[uuid]:
                traxelIdPerTimestepToUniqueIdMap.setdefault(str(t[0]), {})[str(
                    t[1])] = uuid

        trackingGraph.setTraxelToUniqueId(traxelIdPerTimestepToUniqueIdMap)

        # track
        import dpct

        weights = {"weights": [1, 1, 1, 1]}

        if not self.numSplits:
            mergerResult = dpct.trackMaxFlow(trackingGraph.model, weights)
        else:
            getLogger().info("Running split tracking with {} splits.".format(
                self.numSplits))
            mergerResult = SplitTracking.trackFlowBasedWithSplits(
                trackingGraph.model,
                weights,
                numSplits=self.numSplits,
                withMergerResolver=True)

        # transform results to dictionaries that can be indexed by id or (src,dest)
        nodeFlowMap = dict([(int(d['id']), int(d['value']))
                            for d in mergerResult['detectionResults']])
        arcFlowMap = dict([((int(l['src']), int(l['dest'])), int(l['value']))
                           for l in mergerResult['linkingResults']])

        return nodeFlowMap, arcFlowMap
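
When no `transitionClassifier` is given, the `else` branch above derives the transition probability purely from the distance between the two region centers. Pulled out on its own, and assuming array-like `RegionCenter` features, the computation is:

import numpy as np

def distanceBasedTransitionProbabilities(centerSrc, centerDest, transitionParameter=5.0):
    # the probability of the link being used decays exponentially with the distance
    dist = np.linalg.norm(np.asarray(centerDest) - np.asarray(centerSrc))
    prob = np.exp(-dist / transitionParameter)
    return [1.0 - prob, prob]     # [P(link not used), P(link used)]

# two centers 5 pixels apart with the default parameter: exp(-1) is roughly 0.37
print(distanceBasedTransitionProbabilities([0.0, 0.0], [3.0, 4.0]))   # roughly [0.63, 0.37]
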
Example #5
    def _minCostMaxFlowMergerResolving(self, objectFeatures, transitionClassifier=None, transitionParameter=5.0):
        """
        Find the optimal assignments within the `resolvedGraph` by running min-cost max-flow from the
        `dpct` module.

        Converts the `resolvedGraph` to our JSON model structure, predicts the transition probabilities
        either using the given transitionClassifier, or using distance-based probabilities.

        **returns** a `nodeFlowMap` and `arcFlowMap` holding information on the usage of the respective nodes and links

        **Note:** cannot use `networkx` flow methods because they don't work with floating point weights.
        """

        trackingGraph = JsonTrackingGraph()
        for node in self.resolvedGraph.nodes_iter():
            additionalFeatures = {}

            # by default each node supports states 0 and 1 (at most one unit of flow);
            # nodes without incoming/outgoing arcs additionally get appearance/disappearance features below
            numStates = 2
            if len(self.resolvedGraph.in_edges(node)) == 0:
                # division nodes with no incoming arcs offer 2 units of flow without the need to de-merge
                if node in self.unresolvedGraph.nodes() and self.unresolvedGraph.node[node]['division'] and len(self.unresolvedGraph.out_edges(node)) == 2:
                    numStates = 3
                additionalFeatures['appearanceFeatures'] = [[i**2 * 0.01] for i in range(numStates)]
            if len(self.resolvedGraph.out_edges(node)) == 0:
                assert(numStates == 2)  # division nodes without incoming arcs must have outgoing arcs, otherwise they would not appear in the resolved graph
                additionalFeatures['disappearanceFeatures'] = [[i**2 * 0.01] for i in range(numStates)]

            features = [[i**2] for i in range(numStates)]
            uuid = trackingGraph.addDetectionHypotheses(features, **additionalFeatures)
            self.resolvedGraph.node[node]['id'] = uuid

        for edge in self.resolvedGraph.edges_iter():
            src = self.resolvedGraph.node[edge[0]]['id']
            dest = self.resolvedGraph.node[edge[1]]['id']

            featuresAtSrc = objectFeatures[edge[0]]
            featuresAtDest = objectFeatures[edge[1]]

            if transitionClassifier is not None:
                try:
                    featVec = self.pluginManager.applyTransitionFeatureVectorConstructionPlugins(
                        featuresAtSrc, featuresAtDest, transitionClassifier.selectedFeatures)
                except:
                    getLogger().error("Could not compute transition features of link {}->{}:".format(src, dest))
                    getLogger().error(featuresAtSrc)
                    getLogger().error(featuresAtDest)
                    raise
                featVec = np.expand_dims(np.array(featVec), axis=0)
                probs = transitionClassifier.predictProbabilities(featVec)[0]
            else:
                dist = np.linalg.norm(featuresAtDest['RegionCenter'] - featuresAtSrc['RegionCenter'])
                prob = np.exp(-dist / transitionParameter)
                probs = [1.0 - prob, prob]

            trackingGraph.addLinkingHypotheses(src, dest, listify(negLog(probs)))

        # track
        import dpct
        weights = {"weights": [1, 1, 1, 1]}
        mergerResult = dpct.trackMaxFlow(trackingGraph.model, weights)

        # transform results to dictionaries that can be indexed by id or (src,dest)
        nodeFlowMap = dict([(int(d['id']), int(d['value'])) for d in mergerResult['detectionResults']])
        arcFlowMap = dict([((int(l['src']), int(l['dest'])), int(l['value'])) for l in mergerResult['linkingResults']])

        return nodeFlowMap, arcFlowMap
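
The two returned maps are plain dictionaries: `nodeFlowMap` maps a detection's id to the number of flow units through it, and `arcFlowMap` maps a `(src, dest)` pair to the flow along that link. A short usage sketch with invented values:

# invented result values
nodeFlowMap = {4: 1, 5: 2, 7: 0}
arcFlowMap = {(4, 5): 1, (5, 7): 0}

activeDetections = [nodeId for nodeId, flow in nodeFlowMap.items() if flow > 0]
activeLinks = [arc for arc, flow in arcFlowMap.items() if flow > 0]
print(activeDetections, activeLinks)   # [4, 5] [(4, 5)]
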
Example #6
    def _minCostMaxFlowMergerResolving(self,
                                       objectFeatures,
                                       transitionClassifier=None,
                                       transitionParameter=5.0):
        """
        Find the optimal assignments within the `resolvedGraph` by running min-cost max-flow from the
        `dpct` module.

        Converts the `resolvedGraph` to our JSON model structure, predicts the transition probabilities
        either using the given transitionClassifier, or using distance-based probabilities.

        **returns** a `nodeFlowMap` and `arcFlowMap` holding information on the usage of the respective nodes and links

        **Note:** cannot use `networkx` flow methods because they don't work with floating point weights.
        """

        trackingGraph = JsonTrackingGraph()
        for node in self.resolvedGraph.nodes_iter():
            additionalFeatures = {}
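            # boundary nodes (no incoming / no outgoing arcs) get zero-cost appearance/disappearance features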
            if len(self.resolvedGraph.in_edges(node)) == 0:
                additionalFeatures['appearanceFeatures'] = [[0], [0]]
            if len(self.resolvedGraph.out_edges(node)) == 0:
                additionalFeatures['disappearanceFeatures'] = [[0], [0]]
            uuid = trackingGraph.addDetectionHypotheses([[0], [1]],
                                                        **additionalFeatures)
            self.resolvedGraph.node[node]['id'] = uuid

        for edge in self.resolvedGraph.edges_iter():
            src = self.resolvedGraph.node[edge[0]]['id']
            dest = self.resolvedGraph.node[edge[1]]['id']

            featuresAtSrc = objectFeatures[edge[0]]
            featuresAtDest = objectFeatures[edge[1]]

            if transitionClassifier is not None:
                try:
                    featVec = self.pluginManager.applyTransitionFeatureVectorConstructionPlugins(
                        featuresAtSrc, featuresAtDest,
                        transitionClassifier.selectedFeatures)
                except:
                    getLogger().error(
                        "Could not compute transition features of link {}->{}:"
                        .format(src, dest))
                    getLogger().error(featuresAtSrc)
                    getLogger().error(featuresAtDest)
                    raise
                featVec = np.expand_dims(np.array(featVec), axis=0)
                probs = transitionClassifier.predictProbabilities(featVec)[0]
            else:
                dist = np.linalg.norm(featuresAtDest['RegionCenter'] -
                                      featuresAtSrc['RegionCenter'])
                prob = np.exp(-dist / transitionParameter)
                probs = [1.0 - prob, prob]

            trackingGraph.addLinkingHypotheses(src, dest,
                                               listify(negLog(probs)))

        # track
        import dpct
        weights = {"weights": [1, 1, 1, 1]}
        mergerResult = dpct.trackMaxFlow(trackingGraph.model, weights)

        # transform results to dictionaries that can be indexed by id or (src,dest)
        nodeFlowMap = dict([(int(d['id']), int(d['value']))
                            for d in mergerResult['detectionResults']])
        arcFlowMap = dict([((int(l['src']), int(l['dest'])), int(l['value']))
                           for l in mergerResult['linkingResults']])

        return nodeFlowMap, arcFlowMap
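
For comparison between the variants: Examples #4 and #5 build quadratic detection and appearance/disappearance energies so that merger nodes can carry more than one unit of flow, while Example #6 fixes every node to two states with unit detection cost and free appearance/disappearance at the boundaries. Evaluating the expressions used above directly:

# quadratic energies used in Examples #4/#5 (numStates = 3 for division nodes without incoming arcs)
numStates = 3
features = [[i**2] for i in range(numStates)]                    # [[0], [1], [4]]
appearanceFeatures = [[i**2 * 0.01] for i in range(numStates)]   # [[0.0], [0.01], [0.04]]

# fixed two-state energies used in Example #6
featuresSimple = [[0], [1]]
boundaryFeaturesSimple = [[0], [0]]

print(features, appearanceFeatures)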