Example #1
    def test_graph_interface(self):
        # exercise the interface

        g = pgmlink.HypothesesGraph()
        n1 = g.addNode(0)
        n2 = g.addNode(5)
        n3 = g.addNode(7)
        a1 = g.addArc(n1, n2)
        a2 = g.addArc(n1, n3)
        self.assertEqual(pgmlink.countNodes(g), 3)
        self.assertEqual(pgmlink.countArcs(g), 2)
        self.assertEqual(g.earliest_timestep(), 0)
        self.assertEqual(g.latest_timestep(), 7)

        g.erase(a2)
        g.erase(n3)
        self.assertEqual(pgmlink.countNodes(g), 2)
        self.assertEqual(pgmlink.countArcs(g), 1)
        self.assertTrue(g.valid(n1))
        self.assertTrue(g.valid(n2))
        self.assertFalse(g.valid(n3))
        self.assertTrue(g.valid(a1))
        self.assertFalse(g.valid(a2))
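The same interface can also be exercised outside the test harness. A minimal standalone sketch, assuming pgmlink and its Python bindings are importable, using only the calls shown in the test above:

import pgmlink

# build a tiny three-node graph spanning timesteps 0..7
g = pgmlink.HypothesesGraph()
n1 = g.addNode(0)
n2 = g.addNode(5)
n3 = g.addNode(7)
a1 = g.addArc(n1, n2)
a2 = g.addArc(n1, n3)
print(pgmlink.countNodes(g))     # 3
print(pgmlink.countArcs(g))      # 2
print(g.earliest_timestep())     # 0
print(g.latest_timestep())       # 7

# erasing an arc or node invalidates its handle
g.erase(a2)
g.erase(n3)
print(g.valid(n2))               # True
print(g.valid(n3))               # False
print(g.valid(a2))               # False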
Example #2
    if options.avg_obj_size != 0:
        obj_size[0] = options.avg_obj_size
    else:
        options.avg_obj_size = obj_size

    # load traxelstore
    ts, fs, ndim, t0, t1, probGenerator, transitionClassifier = loadTraxelstoreAndTransitionClassifier(
        options, ilp_fn, time_range, shape)

    # build hypotheses graph
    hypotheses_graph, n_it, a_it, fov = getHypothesesGraphAndIterators(
        options, shape, t0, t1, ts, probGenerator, transitionClassifier,
        options.skipLinks, options.skipLinksBias)

    if probGenerator is None:
        import pgmlink
        numElements = pgmlink.countNodes(hypotheses_graph) + pgmlink.countArcs(
            hypotheses_graph)

        # get the map of node -> list(traxel) or just traxel
        if options.without_tracklets:
            traxelMap = hypotheses_graph.getNodeTraxelMap()
        else:
            traxelMap = hypotheses_graph.getNodeTrackletMap()

        maxNumObjects = int(options.max_num_objects)
        margin = float(options.border_width)

        def detectionProbabilityFunc(traxel):
            return getDetectionFeatures(traxel, maxNumObjects + 1)

        def transitionProbabilityFunc(srcTraxel, destTraxel):
Example #3
def track_subgraphs(graph,
                    time_range,
                    timesteps_per_segment,
                    segment_overlap_timesteps,
                    conservation_tracking_parameter,
                    fov,
                    ilp_fn,
                    ts, 
                    fs,
                    t0,
                    trans_classifier,
                    uncertaintyParam
                    ):
    """
    Experiment: track only subgraphs of the full hypotheses graph with some overlap,
    and then stitch the results together using fusion moves.
    """
    # define which segments we have
    num_segments = int(np.ceil(float((time_range[1] - time_range[0])) / (timesteps_per_segment - segment_overlap_timesteps)))
    segments = [(time_range[0] + i * (timesteps_per_segment - segment_overlap_timesteps),
                 (time_range[0] + (i + 1) * timesteps_per_segment - i * segment_overlap_timesteps))
                for i in range(num_segments)]
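    # worked example (hypothetical numbers, for illustration only): with
    # time_range=(0, 100), timesteps_per_segment=30 and segment_overlap_timesteps=5,
    # the stride is 25, num_segments = ceil(100 / 25) = 4, and the windows are
    # (0, 30), (25, 55), (50, 80), (75, 105), each sharing 5 timesteps with its neighbour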

    tmap = graph.getNodeTraxelMap()
    solutions = {}
    arc_solutions = {}
    div_solutions = {}

    original_out_dir = options.out_dir

    # track all segments individually
    for i, segment in enumerate(segments):
        print("************** Creating subgraph for timesteps in {}".format(segment))

        # use special out-dir per window
        options.out_dir = original_out_dir.rstrip('/') + '/window_' + str(i) + '/'
        try:
            os.makedirs(options.out_dir)
        except OSError:
            # the per-window directory may already exist
            pass

        # create subgraph for this segment
        node_mask = track.NodeMask(graph)
        n_it = track.NodeIt(graph)
        for n in n_it:
            node_mask[n] = segment[0] <= tmap[n].Timestep < segment[1]

        arc_mask = track.ArcMask(graph)
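        # keep an arc only when its source timestep is at or after the window start and
        # its target timestep is before the window end, i.e. both endpoints of a
        # forward-in-time arc fall inside the current window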
        a_it = track.ArcIt(graph)
        for a in a_it:
            arc_mask[a] = tmap[graph.source(a)].Timestep >= segment[0] and tmap[graph.target(a)].Timestep < segment[1]
        subgraph = track.HypothesesGraph()
        track.copy_hypotheses_subgraph(graph, subgraph, node_mask, arc_mask)
        subgraph_node_origin_map = subgraph.getNodeOriginReferenceMap()
        subgraph_arc_origin_map = subgraph.getArcOriginReferenceMap()
        subgraph.initLabelingMaps()

        # fix variables in overlap
        if i > 0:
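            # nodes at the first timestep of this window were already solved by the
            # previous window; pin them to that value via an appearance label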
            sub_tmap = subgraph.getNodeTraxelMap()
            n_it = track.NodeIt(subgraph)
            for n in n_it:
                if segment[0] == sub_tmap[n].Timestep:
                    origin_node = subgraph_node_origin_map[n]
                    origin_node_id = graph.id(origin_node)
                    subgraph.addAppearanceLabel(n, solutions[origin_node_id][-1])
                    print "fixing node ", origin_node_id, " which is ", subgraph.id(n), " in subgraph"

        print("Subgraph has {} nodes and {} arcs".format(track.countNodes(subgraph), track.countArcs(subgraph)))

        # create subgraph tracker
        subgraph_tracker = track.ConsTracking(subgraph,
                                                ts,
                                                conservation_tracking_parameter,
                                                uncertaintyParam,
                                                fov,
                                                bool(options.size_dependent_detection_prob),
                                                options.avg_obj_size[0],
                                                options.mnd,
                                                options.division_threshold)
        all_events = subgraph_tracker.track(conservation_tracking_parameter, bool(i > 0))

        if len(options.raw_filename) > 0 and len(options.reranker_weight_file) > 0:
            # run merger resolving and feature extraction, which also returns the score of each proposal
            region_features = multitrack.getRegionFeatures(ndim)
            scores = multitrack.runMergerResolving(options, 
                subgraph_tracker, 
                ts,
                fs,
                subgraph,
                ilp_fn,
                all_events,
                fov,
                region_features,
                trans_classifier,
                segment[0],
                True)

            best_sol_idx = int(np.argmax(np.array(scores)))
            subgraph.set_solution(best_sol_idx)
            print("====> selected solution {} in window {} <=====".format(best_sol_idx, i))
        else:
            subgraph.set_solution(0)
        print("Done tracking subgraph")

        # collect solutions
        subgraph_node_active_map = subgraph.getNodeActiveMap()
        subgraph_arc_active_map = subgraph.getArcActiveMap()
        subgraph_div_active_map = subgraph.getDivisionActiveMap()
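        # map this window's solution back to the original graph's node and arc ids;
        # nodes shared between windows accumulate one value per window in `solutions`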

        n_it = track.NodeIt(subgraph)
        for n in n_it:
            origin_node = subgraph_node_origin_map[n]
            origin_node_id = graph.id(origin_node)
            value = subgraph_node_active_map[n]

            if origin_node_id not in solutions:
                solutions[origin_node_id] = [value]
            else:
                solutions[origin_node_id].append(value)
            div_solutions[origin_node_id] = subgraph_div_active_map[n]
        a_it = track.ArcIt(subgraph)
        for a in a_it:
            origin_arc = subgraph_arc_origin_map[a]
            origin_arc_id = graph.id(origin_arc)
            arc_solutions[origin_arc_id] = subgraph_arc_active_map[a]
        print("Done storing solutions")

    # reset out-dir
    options.out_dir = original_out_dir

    # find overlapping variables
    print("Computing overlap statistics...")
    num_overlap_vars = sum([1 for values in solutions.values() if len(values) > 1])
    num_disagreeing_overlap_vars = sum([1 for values in solutions.values() if len(values) > 1 and values[0] != values[1]])
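    # assuming the overlap is shorter than half the window length, each node is solved by
    # at most two windows, so comparing values[0] and values[1] covers every conflict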

    for key, values in solutions.items():
        if len(values) > 1 and values[0] != values[1]:
            print("\tFound disagreement at {}: {} != {}".format(key, values[0], values[1]))

    print("Found {} variables in overlaps, of which {} did disagree ({}%)".format(num_overlap_vars,
                                                                                  num_disagreeing_overlap_vars,
                                                                                  100.0 * float(num_disagreeing_overlap_vars) / num_overlap_vars))
    
    if num_disagreeing_overlap_vars == 0:
        # write overall solution back to hypotheses graph
        graph.initLabelingMaps()
        n_it = track.NodeIt(graph)
        for n in n_it:
            n_id = graph.id(n)
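            # all windows that solved this node agreed (checked above), so the most
            # recently stored value is the consensus value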

            graph.addAppearanceLabel(n, solutions[n_id][-1])
            graph.addDisappearanceLabel(n, solutions[n_id][-1])

            # store division information
            graph.addDivisionLabel(n, div_solutions[n_id])

        # activate arcs
        a_it = track.ArcIt(graph)
        for a in a_it:
            a_id = graph.id(a)
            graph.addArcLabel(a, arc_solutions[a_id])
        graph.set_injected_solution()
    else:
        raise AssertionError("Nodes in the overlap regions disagreed, cannot create a stitched solution")
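To inspect the window layout that track_subgraphs uses without loading a hypotheses graph or traxelstore, the segment arithmetic can be reproduced on its own. A minimal sketch; the helper name compute_segments and the example numbers are illustrative assumptions, not part of the source:

import math

def compute_segments(time_range, timesteps_per_segment, segment_overlap_timesteps):
    # same arithmetic as the top of track_subgraphs above
    stride = timesteps_per_segment - segment_overlap_timesteps
    num_segments = int(math.ceil(float(time_range[1] - time_range[0]) / stride))
    return [(time_range[0] + i * stride,
             time_range[0] + (i + 1) * timesteps_per_segment - i * segment_overlap_timesteps)
            for i in range(num_segments)]

# e.g. 60 timesteps, 20-frame windows, 4 frames of overlap
print(compute_segments((0, 60), 20, 4))  # [(0, 20), (16, 36), (32, 52), (48, 68)]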