	parser = argparse.ArgumentParser()
	parser.add_argument("input", help='input file that contains the point cloud', type=str)
	parser.add_argument("output", help='path to the output file', type=str)
	parser.add_argument("--slice", "-s", help='slice size (default: 15000)', type=int, default=15000)
	args = parser.parse_args()

	# Load input file
	pcd_list = ut.loadPoints(args.input, [2,3,4])

	# Compute the number of iterations to do given the slice size
	to_process = len(pcd_list)
	slice_size = args.slice
	iterations = to_process // slice_size
	if iterations == 0 or to_process % slice_size != 0:
		iterations += 1
	largest_clusters = []

	for i in range(iterations):
		begin = i*slice_size
		end = min((i+1)*slice_size, to_process)
		# Get the largest clusters
		largest_clusters += noise_removal(pcd_list[begin:end], ratio=0.1)
	largest_clusters = np.array(largest_clusters)
	
	ut.savePoints(largest_clusters, args.output, [0,1,2])
	viewer(largest_clusters, c=largest_clusters[:,2], s=2)
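The noise_removal helper called above is not part of this listing. The following is a minimal sketch of what such a function could look like, assuming each point is an (x, y, z) row and using scikit-learn's DBSCAN to keep only clusters that hold at least `ratio` of the points in the slice; the DBSCAN choice and the eps/min_samples parameters are assumptions, not the example's actual implementation.

import numpy as np
from sklearn.cluster import DBSCAN

def noise_removal(points, ratio=0.1, eps=0.5, min_samples=10):
	"""Hypothetical sketch: cluster one slice of the point cloud and keep
	only points belonging to clusters with at least `ratio` of the slice."""
	pts = np.asarray(points, dtype=float)
	labels = DBSCAN(eps=eps, min_samples=min_samples).fit_predict(pts)
	kept = []
	for label in set(labels):
		if label == -1:		# -1 marks DBSCAN noise points
			continue
		cluster = pts[labels == label]
		if len(cluster) >= ratio * len(pts):
			kept.append(cluster)
	# return a plain list of points so the caller can extend with `+=`
	return list(np.vstack(kept)) if kept else []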
Example #2
    fields_in = {
        'id': 0,
        'lidar_id': 1,
        'lat': 2,
        'lon': 3,
        'ele': 4,
        'val': 5
    }
    fields_out = {
        0: 'id',
        1: 'lidar_id',
        2: 'lat',
        3: 'lon',
        4: 'ele',
        5: 'val'
    }

    for c in chunks:
        pcd_list = ut.loadPoints(c, fields_in)
        # filter on elevation by getting the points between the 35th
        # percentile and the 65th percentile for each chunk
        elevations = [pcd["ele"] for pcd in pcd_list]
        options["minele"] = np.percentile(elevations, 35)
        options["maxele"] = np.percentile(elevations, 65)

        filtered_list += filterPoints(pcd_list, options)
        i += 1.
        sys.stderr.write("%.2f\r" % float(100 * i / l))

    sys.stderr.write("\nDone\n")

    ut.savePoints(filtered_list, args.output, fields_out)
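The ut module used by these examples is not included either. A possible reading of loadPoints and savePoints that is consistent with the calls above: on load, a dict maps field names to column indices (example #1 instead passes a plain list of column indices), and on save, a dict maps output column positions to field names. The whitespace-separated text format and these function bodies are assumptions, not the project's real utilities.

def loadPoints(path, fields):
    """Hypothetical sketch: read whitespace-separated rows.  With a dict
    ({'lat': 2, ...}) each point becomes a dict of named fields; with a
    list of column indices ([2, 3, 4]) each point becomes a list."""
    points = []
    with open(path) as f:
        for line in f:
            cols = line.split()
            if not cols:
                continue
            if isinstance(fields, dict):
                points.append({name: float(cols[idx]) for name, idx in fields.items()})
            else:
                points.append([float(cols[idx]) for idx in fields])
    return points

def savePoints(points, path, fields):
    """Hypothetical sketch: write one whitespace-separated line per point.
    With a dict ({0: 'id', ...}) the keys give the column order and the
    values name the field to write; with a list, values are positional."""
    with open(path, "w") as f:
        for p in points:
            if isinstance(fields, dict):
                values = [p[fields[k]] for k in sorted(fields)]
            else:
                values = [p[i] for i in fields]
            f.write(" ".join(str(v) for v in values) + "\n")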
Example #3
	# initialize options for the filter
	options = {}

	# filter on the intensity of the points
	# let's keep points that have an intensity value > 190

	options["minval"] = 190

	sys.stderr.write("Loading and filtering " + str(int(l)) + " chunks...\n")

	filtered_list = []
	fields_in = {'id':0, 'lidar_id':1, 'lat':2, 'lon':3, 'ele':4, 'val': 5}
	fields_out = {0:'id', 1:'lidar_id', 2:'lat', 3:'lon', 4:'ele', 5:'val'}

	for c in chunks:
		pcd_list = ut.loadPoints(c, fields_in)
		# filter on elevation by getting the points between the 35th
		# percentile and the 65th percentile for each chunk
		elevations = [pcd["ele"] for pcd in pcd_list]
		options["minele"] = np.percentile(elevations, 35)
		options["maxele"] = np.percentile(elevations, 65)

		filtered_list += filterPoints(pcd_list, options)
		i += 1.
		sys.stderr.write("%.2f\r" % float(100*i/l))

	sys.stderr.write("\nDone\n")

	ut.savePoints(filtered_list, args.output, fields_out)
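filterPoints is likewise defined outside the listing. Judging from the option keys set above (minval, minele, maxele), it applies simple per-point bounds on intensity and elevation; a minimal sketch under that assumption:

def filterPoints(pcd_list, options):
	"""Hypothetical sketch: keep only points that satisfy every bound
	present in `options` (minval, minele, maxele are the keys used above)."""
	kept = []
	for pcd in pcd_list:
		if "minval" in options and pcd["val"] < options["minval"]:
			continue
		if "minele" in options and pcd["ele"] < options["minele"]:
			continue
		if "maxele" in options and pcd["ele"] > options["maxele"]:
			continue
		kept.append(pcd)
	return kept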