def getHigherBins(dataset, range_bin=60, nbeams=10):
    log("Coletando os maiores bins dos beams")

    new_dataset = list()
    for image in dataset:
        new_dataset.append(list())
        for beam in image:
            # (value, index) pairs of the nbeams strongest bins of this beam.
            highers = list()
            for i, bin_ in enumerate(beam['raw']):
                if len(highers) < nbeams:
                    highers.append((bin_, i))
                else:
                    # Replace the weakest kept bin whenever a stronger one
                    # appears, so that the nbeams largest values survive.
                    weakest = min(range(nbeams), key=lambda j: highers[j][0])
                    if bin_ > highers[weakest][0]:
                        highers[weakest] = (bin_, i)
            for i in range(len(highers)):
                aux = dict()
                aux['angle'] = beam['angle']
                aux['z'] = beam['z']
                aux['higher'] = dict()
                aux['higher']['value'] = highers[i][0]
                # Convert the bin index into a distance along the beam.
                aux['higher']['dist'] = (highers[i][1] + 1) / len(beam['raw']) * range_bin
                aux['higher']['index'] = highers[i][1]
                new_dataset[-1].append(aux)
    return new_dataset
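A minimal usage sketch for getHigherBins on a hand-built input shaped like the output of splitDataset further below (one image, one beam). It assumes log() is defined in the same module, e.g. a thin wrapper around print():

dataset = [[{'angle': 0.0, 'z': 1.0, 'raw': [0.1, 0.9, 0.3, 0.7, 0.2, 0.8]}]]

# One image with a single beam; ask for the three strongest bins
# (0.9, 0.8 and 0.7, in no particular order).
for point in getHigherBins(dataset, range_bin=60, nbeams=3)[0]:
    print(point['higher']['value'], point['higher']['index'], point['higher']['dist'])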
def removeMaxDistPoints(point_cloud, max_dist):
    log(" - Removendo pontos próximos ao sensor.")

    new_point_cloud = list()
    for point in point_cloud:
        if point['dist'] < max_dist:
            new_point_cloud.append(point)
    return new_point_cloud
from math import radians


def removeExtAnglePoints(point_cloud, angle):
    log(" - Removing points outside the horizontal aperture angle")

    new_point_cloud = list()
    angle = radians(angle)
    for point in point_cloud:
        # Keep points within half the aperture on either side of the centre line.
        if abs(point['angle']) <= angle / 2:
            new_point_cloud.append(point)
    return new_point_cloud
def downsamplerFilter(point_cloud, space=1):
    log("Removendo pontos próximos na nuvem")

    pc = parseToPointCloud(point_cloud)
    pc = pc.make_voxel_grid_filter()
    pc.set_leaf_size(space, space, space)
    pc = pc.filter()
    pc = pc.to_array()

    return pc
def smoothingFilter(point_cloud):
    log("Smoothing the point cloud")

    # Moving Least Squares resamples the surface to smooth out measurement noise.
    pc = parseToPointCloud(point_cloud)
    pc = pc.make_moving_least_squares()
    pc.set_search_radius(5)
    pc.set_polynomial_fit(True)
    pc = pc.process()
    pc = pc.to_array()

    return pc
def staticalOutlierFilter(point_cloud):
    log("Removing noise from the point cloud")

    pc = parseToPointCloud(point_cloud)

    # Statistical outlier removal: drop points whose mean distance to their
    # 15 nearest neighbours is more than 1 standard deviation above the average.
    pc = pc.make_statistical_outlier_filter()
    pc.set_mean_k(15)
    pc.set_std_dev_mul_thresh(1)
    pc = pc.filter()
    pc = pc.to_array()

    return pc
def removeOutliers(point_cloud,
                   min_val=1.5,
                   min_dist=15,
                   max_dist=20,
                   angle=120):
    log("Removendo outliers da nuvem de pontos:")

    new_point_cloud = removeExtAnglePoints(point_cloud, angle)
    new_point_cloud = removeMinDistPoints(new_point_cloud, min_dist)
    new_point_cloud = removeMaxDistPoints(new_point_cloud, max_dist)
    new_point_cloud = removeMinValPoints(new_point_cloud, min_val)

    return new_point_cloud
def generatePointCloud(dataset):
    log("Gerando nuvem de pontos")

    refined = list()
    for image in dataset:
        for beam in image:
            x, y = polar2Cartesian(beam['higher']['dist'], beam['angle'])
            refined.append({
                'x': x,
                'y': y,
                'z': beam['z'],
                'value': beam['higher']['value'],
                'angle': beam['angle'],
                'dist': beam['higher']['dist']
            })
    return refined
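generatePointCloud relies on polar2Cartesian, which is not included in this snippet. A plausible sketch, assuming dist is the range along the beam and angle is the horizontal bearing in radians (both assumptions; the real module may use a different convention):

from math import cos, sin


def polar2Cartesian(dist, angle):
    # Hypothetical helper: project a (range, bearing) pair onto the x/y plane.
    return dist * cos(angle), dist * sin(angle)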
def removeMinValPoints(point_cloud, min_val):
    log(" - Removendo pontos com baixo valor de intensidade")

    new_point_cloud = list()
    minimum = float('inf')
    maximum = 0
    for point in point_cloud:
        if point['value'] < minimum:
            minimum = point['value']
        if point['value'] > maximum:
            maximum = point['value']

    # min_val is given as a percentage of the intensity range; convert it into
    # an absolute threshold before filtering.
    min_val = (((maximum - minimum) / 100) * min_val) + minimum

    for point in point_cloud:
        if point['value'] >= min_val:
            new_point_cloud.append(point)
    return new_point_cloud
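A quick check of the percentage threshold used above (hypothetical values; again assumes log() exists):

cloud = [{'value': 10.0}, {'value': 11.5}, {'value': 110.0}]
# Intensities range from 10 to 110, so min_val=1.5 maps to
# ((110 - 10) / 100) * 1.5 + 10 = 11.5 and the 10.0 point is dropped.
print(len(removeMinValPoints(cloud, 1.5)))  # -> 2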
def getHigherBin(dataset, range_bin, image_size=179):
    log("Collecting the highest bin of each beam")

    for image in dataset:
        for beam in image:
            # Track the strongest bin and its position along the beam.
            higher = 0
            higher_index = -1
            for i, bin_ in enumerate(beam['raw']):
                if bin_ > higher:
                    higher = bin_
                    higher_index = i
            beam['higher'] = {
                'value': higher,
                'dist': (higher_index + 1) / len(beam['raw']) * range_bin,
                'index': higher_index
            }
        # Drop the last beam when the image exceeds image_size.
        if len(image) > image_size:
            image.pop()
    return dataset
def splitDataset(dataset, image_size=180):
    log("Separando o dataset em imagens")

    if dataset[0]['angle'] < dataset[1]['angle']:
        clockwise = False
    else:
        clockwise = True
    data = list()
    image = list()
    previous = dataset[0]['angle']
    for line in dataset:
        # A change of sweep direction marks the boundary between two images.
        cond_left = line['angle'] < previous and not clockwise
        cond_right = line['angle'] > previous and clockwise
        previous = line['angle']
        if cond_left or cond_right:
            clockwise = not clockwise
            # Keep only complete sweeps; partial images are discarded.
            if len(image) == image_size:
                data.append(image)
            image = list()
        image.append(line)
    data.append(image)
    return data
def getDataset(file):
    log("Collecting data from the dataset")

    dataset = list()
    with open(file) as f:
        # Skip the header row, then parse one beam per CSV line.
        for line in f.readlines()[1:]:
            aux = line.rstrip().split(',')
            beam = {
                'id': int(aux[0]),
                'angle': float(aux[1]),
                'x': float(aux[2]),
                'y': float(aux[3]),
                'z': float(aux[4]),
                'ox': float(aux[5]),
                'oy': float(aux[6]),
                'oz': float(aux[7]),
                'ow': float(aux[8]),
                # Remaining columns hold the raw bin intensities (the trailing
                # field is ignored).
                'raw': [float(value) for value in aux[9:-1]]
            }
            dataset.append(beam)
    return dataset
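A sketch of how these functions appear to fit together end to end. The file name and parameter values are placeholders, and removeMinDistPoints and parseToPointCloud are assumed to be defined elsewhere in the module:

dataset = getDataset('sonar_scan.csv')
images = splitDataset(dataset, image_size=180)
images = getHigherBins(images, range_bin=60, nbeams=10)
cloud = generatePointCloud(images)
cloud = removeOutliers(cloud, min_val=1.5, min_dist=15, max_dist=20, angle=120)
# The PCL-based filters (downsamplerFilter, staticalOutlierFilter and
# smoothingFilter) can then be applied; feeding their array output back in
# depends on parseToPointCloud, which is not shown here.
filtered = staticalOutlierFilter(cloud)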