Example #1
    def test_fast_segmentation(self):
        n = 360
        k = 3
        epsilon = 1

        generate_input_file(n)
        data = np.genfromtxt("input.csv", delimiter=" ")
        p = np.c_[np.mgrid[1:n + 1], data]

        D = Coreset.build_coreset(p, k, epsilon)
        print(len(D))  # number of segment coresets returned by build_coreset
        x = np.empty((0, 4))
        for coreset in D:
            print("coreset range", coreset.e - coreset.b + 1)
            # sample points on the fitted line of this segment coreset
            pts = utils.pt_on_line(range(int(coreset.b), int(coreset.e) + 1), coreset.g)
            # TODO: 2nd parameter should be epsilon
            w = Coreset.PiecewiseCoreset(len(pts[0]), epsilon)
            p_coreset = np.column_stack((pts[0], pts[1], pts[2], w))
            # keep only the points that received a positive weight
            p_coreset_filtered = p_coreset[p_coreset[:, 3] > 0]
            # print("weighted points", p_coreset_filtered)
            x = np.append(x, p_coreset_filtered, axis=0)
        print("num of weighted points", len(x))
        dividers = ksegment.coreset_k_segment_fast_segmentation(x, k)
        print("dividers", dividers)
        print("dividers-cost:", utils.calc_cost_dividers(p, dividers))
        utils.visualize_3d(p, dividers)
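
Each of the segmentation examples on this page expects generate_input_file(n) to write an "input.csv" of n space-delimited data points before the test runs; the helper itself is not listed here. Below is a minimal sketch of what such a generator might look like, assuming two data dimensions (so that, with the prepended index column, p holds 3-D points, matching the pts[0], pts[1], pts[2] usage and visualize_3d above) and an arbitrary noisy piecewise-linear signal; the actual signal used by the original tests is not known.

import numpy as np

def generate_input_file(n, path="input.csv", seed=0):
    # Hypothetical stand-in for the helper used by the tests above:
    # writes n noisy 2-D samples of a three-piece linear signal.
    rng = np.random.RandomState(seed)
    slopes = np.repeat([0.5, -1.0, 2.0], n // 3 + 1)[:n]
    y1 = np.cumsum(slopes) + rng.normal(scale=2.0, size=n)
    y2 = np.cumsum(slopes[::-1]) + rng.normal(scale=2.0, size=n)
    np.savetxt(path, np.column_stack((y1, y2)), delimiter=" ")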
Example #2
    def test_basic_demo(self):
        # dimension = 2
        k = 3
        epsilon = 0.5
        n = 600

        generate_input_file(n)
        data = np.genfromtxt("input.csv", delimiter=" ")
        p = np.c_[np.mgrid[1:n + 1], data]

        coreset = Coreset.build_coreset(p, k, epsilon)
        dividers = ksegment.coreset_k_segment(coreset, k)
        utils.visualize_3d(p, dividers)
Example #3
    def test_fast_segmentation(self):
        n = 600
        k = 3
        epsilon = 10

        generate_input_file(n)
        data = np.genfromtxt("input.csv", delimiter=" ")
        p = np.c_[np.mgrid[1:n + 1], data]

        D = Coreset.build_coreset(p, k, epsilon)
        dividers = ksegment.coreset_k_segment_fast_segmentation(D, k, epsilon)
        print "dividers", dividers
        print "dividers-cost:", utils.calc_cost_dividers(p, dividers)
        utils.visualize_3d(p, dividers)
Example #4
    def test_coreset_merging(self):
        # generate points
        n = 120
        # dimension = 2
        k = 3
        epsilon = 0.1

        # data = random_data(n, dimension)
        # example1 requires an n that is divisible by 6 (see the sketch after this example)
        data = example1(n)

        p = np.c_[np.mgrid[1:n + 1], data]

        coreset = Coreset.build_coreset(p, k, epsilon)
        coreset_of_coreset = Coreset.build_coreset(coreset, k, epsilon, is_coreset=True)
        dividers = ksegment.coreset_k_segment(coreset_of_coreset, k)
        utils.visualize_3d(p, dividers)
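
example1(n) is another data generator that is not shown on this page; the in-code comment only reveals that n must be divisible by 6, which suggests a signal made of six equal-length segments. The following is a rough sketch under that reading; the segment levels, noise scale, and the use of constant (rather than sloped) segments are invented for illustration.

import numpy as np

def example1(n):
    # Hypothetical 6-segment generator implied by the comment in
    # test_coreset_merging; requires n to be divisible by 6.
    assert n % 6 == 0, "example1 expects n to be divisible by 6"
    seg_len = n // 6
    levels = (0.0, 5.0, -3.0, 8.0, 1.0, -6.0)  # arbitrary plateau values
    segments = [level + 0.1 * np.random.randn(seg_len, 2) for level in levels]
    return np.vstack(segments)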
Example #5
def plot_3d(size, map_indices, path_indices):
    # load network
    abstraction_vin = Abstraction_VIN_3D(size)

    # load network state
    if os.path.isfile('network/%s.pt' % abstraction_vin.name):
        abstraction_vin.load_state_dict(
            torch.load('network/%s.pt' % abstraction_vin.name))
        abstraction_vin.to(abstraction_vin.device)
    else:
        print("Abstraction_VIN was not trained.")
        return

    for map_index, path_index in zip(map_indices, path_indices):
        grid_map = dataset.grids[map_index]  # environment map
        optimal_path = dataset.expert_paths[map_index][path_index]
        start = optimal_path[0]
        goal = optimal_path[-1]
        print('Map index: %d' % map_index)
        print('Path index: %d' % path_index)
        print('Optimal path length:  %f' %
              get_path_length(optimal_path, dim=3))

        # predict path
        abstraction_vin_path, abstraction_vin_success = _get_path_3d(
            abstraction_vin, dataset, grid_map, map_index, start, goal,
            2 * len(optimal_path))
        abstraction_vin_path = torch.stack(abstraction_vin_path, dim=0)
        if abstraction_vin_success:
            print('Abstraction_VIN path length:  %f' %
                  get_path_length(abstraction_vin_path, dim=2))
        else:
            print('Abstraction_VIN: No path found.')

        # plot paths
        visualize_3d(grid_map, goal, optimal_path, abstraction_vin_path)
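
A possible invocation of plot_3d is sketched below, assuming the module-level dataset (with grids and expert_paths) has already been loaded and that a checkpoint for the chosen grid size exists under network/. The size value and the index lists are placeholders, not values taken from the original project.

if __name__ == '__main__':
    # Compare the expert path with the Abstraction_VIN_3D prediction
    # on two (map, path) pairs; adjust indices to the dataset at hand.
    plot_3d(size=32, map_indices=[0, 1], path_indices=[0, 0])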