Esempio n. 1
0
def measures_to_accumulators_dict(data):
    """
    Run a dict-based Hough transform on each measure and display the result.

    Uses the polar representation of the detected lines.

    :param data: iterable of raw one-turn lidar measures
    :return: None (results are printed and plotted)
    """
    print("-----------measures_to_accumulators_dict-----------")
    for index, raw_measure in enumerate(data):
        # Drop low-quality samples (quality threshold 30), then go cartesian.
        filtered = dacl.keep_good_measures(raw_measure, 30)
        cartesian = outr.one_turn_to_cartesian_points(filtered)

        print(index)
        # Vote in Hough space using the dict-backed accumulator.
        accumulator, thetas, rhos = outr.hough_transform_to_dict(cartesian)

        # Keep only the strongest accumulator peaks as candidate lines.
        accum, angles, dists, extrema = outr.take_brightest_points(
            accumulator, thetas, rhos)
        print(accum, angles, dists)

        # Overlay the detected lines on the polar measure.
        reconstruct_image_with_lines_polar(filtered, angles, dists)
Esempio n. 2
0
def measures_to_images(data):
    """Filter each measure, rebase it, and display it as cartesian points."""
    for index, raw_measure in enumerate(data):
        # Keep only samples whose quality reaches 30.
        filtered = dacl.keep_good_measures(raw_measure, 30)
        cartesian = outr.one_turn_to_cartesian_points(filtered)

        # Re-express the points in a translated basis.
        cartesian, translation = outr.change_basis(cartesian)

        width, height = outr.get_width_height(cartesian)
        image = np.zeros((int(width), int(height)), dtype=np.uint8)

        display_measures(cartesian)
        print(index)
Esempio n. 3
0
def main_clustering():
    """Cluster one realistic lidar turn.

    :return: (clusters, means, closest_points) as produced by clus.clusterize
    """
    turn = get_realistic_data()[0]
    # Filter by quality, then by distance bounds.
    turn = dacl.keep_good_measures(turn, 30)
    turn = dacl.keep_not_too_far_or_not_too_close(turn)
    # Angles arrive in degrees; convert each (angle, distance) pair to radians.
    turn = [np.array([np.deg2rad(m[0]), m[1]]) for m in turn]
    cartesian = outr.one_turn_to_cartesian_points(turn)
    cartesian = [np.array(point) for point in cartesian]
    clusters, means, closest_points = clus.clusterize(cartesian)
    return clusters, means, closest_points
Esempio n. 4
0
def display_measures(polar_points):
    """Plot one turn of polar measures as red pixels in cartesian space."""
    fig = pl.figure()
    ax = fig.add_subplot(111)
    ax.clear()
    ax.set_xlim(-distance_max_x_cartesien, distance_max_x_cartesien)
    ax.set_ylim(-distance_max_y_cartesien, distance_max_y_cartesien)
    # Draw the axes through the origin.
    ax.axhline(0, 0)
    ax.axvline(0, 0)

    cartesian = outr.one_turn_to_cartesian_points(polar_points)

    # Split the (x, y) pairs into the two coordinate lists pl.plot expects.
    xs = [point[0] for point in cartesian]
    ys = [point[1] for point in cartesian]

    pl.plot(xs, ys, 'r,')
    fig.canvas.draw()
    pl.show()
Esempio n. 5
0
def reconstruct_image_with_lines_polar(polar_points, angles, dists):
    """
    Plot the raw polar measures and overlay the Hough-detected lines.

    :param polar_points: iterable of (theta_degrees, rho) measure pairs
    :param angles: detected line angles in radians (from the Hough transform)
    :param dists: detected line distances (rho values, same order as angles)
    :return: None (shows a matplotlib polar figure)
    """
    cartesian_good_data = outr.one_turn_to_cartesian_points(polar_points)
    width, height = outr.get_width_height(cartesian_good_data)
    # Mirrors the rho offset used when the accumulator was built —
    # presumably outr.hough_transform_to_dict offsets rho by the diagonal;
    # TODO confirm against that helper.
    diag_len = int(round(math.sqrt(width * width + height * height)))

    # Raw measures as red pixels on the polar axes.
    for theta, rho in polar_points:
        pl.polar(math.radians(theta), rho, 'r,')

    print("fini ?")
    for theta, rho in zip(angles, dists):
        sin_t = math.sin(theta)
        if abs(sin_t) < 1e-12:
            # Vertical line: y = rho/sin(theta) is undefined. The original
            # code raised ZeroDivisionError here for theta == 0; skip instead.
            continue
        slope = math.cos(theta) / sin_t
        thetas_on_line, rhos_on_line = [], []
        for x in range(-int(distance_max_x_cartesien / 2),
                       int(distance_max_x_cartesien / 2)):
            # Cartesian line equation, shifted back by the accumulator's
            # rho offset, then re-expressed in polar for plotting.
            y = rho / sin_t - slope * x - diag_len
            a, d = outr.cartesian_to_polar([x, y])
            thetas_on_line.append(a)
            rhos_on_line.append(d)
        pl.polar(thetas_on_line, rhos_on_line, 'b-')
    pl.show()
Esempio n. 6
0
def main():
    """Run the match script: start the communication threads, locate the
    opponent before the match, then track clusters/robots until the match
    stops and finally close every connection."""
    logger = logging.getLogger(log_filename)
    logger.info("On a lancé le script de match")

    # region # thread initialisation
    # Lidar acquisition, wheel-encoder acquisition and high-level
    # communication each run in their own thread.
    t_lidar = datr.LidarThread(log_filename)
    t_lidar.start()

    t_ll = datr.EncoderThread(log_filename)
    t_ll.start()

    t_hl = comm.HLThread(log_filename)
    t_hl.start()

    logger.info("Fils de communication lancés")

    # Give the threads time to establish their connections.
    time.sleep(3)
    # endregion

    # region # variable initialisation
    start_enemy_positions = []
    own_colour_team = None
    computed_opponent_robot_position = False
    previous_beacons = []
    previous_self_positions = []
    previous_clusters = []
    previous_opponent_robots = []
    # endregion

    # region # before the match
    # NOTE(review): this first clusterize call receives polar points while
    # every later call receives cartesian points — confirm clus.clusterize
    # accepts both, or whether a conversion is missing here.
    one_turn_points = dacl.filter_points(t_lidar.get_measures(), 50)
    one_turn_clusters = clus.clusterize(one_turn_points)

    # Busy-loop until the high level signals the match start; meanwhile
    # determine the team colour and the opponent's starting position.
    while not t_hl.has_match_begun():
        # team colour
        if t_hl.get_team_colour() is not None:
            # computes the position
            if t_hl.get_team_colour() == TeamColor.purple:
                start_enemy_positions = eloc.find_robots_in_purple_zone(
                    one_turn_clusters)
                computed_opponent_robot_position = True
                own_colour_team = TeamColor.purple
                logger.info("On est violet")

            elif t_hl.get_team_colour() == TeamColor.orange:
                start_enemy_positions = eloc.find_robot_in_orange_zone(
                    one_turn_clusters)
                computed_opponent_robot_position = True
                own_colour_team = TeamColor.orange
                logger.info("On est orange")

            # retrieves and filters measures
            one_turn_points = dacl.filter_points(t_lidar.get_measures(), 50)
            cartesian_one_turn_points = outr.one_turn_to_cartesian_points(
                one_turn_points)
            one_turn_clusters = clus.clusterize(cartesian_one_turn_points)

            # send the positions of the opponent robots
            # NOTE(review): positions are re-sent on every loop iteration
            # once computed — confirm the high level tolerates repeats.
            if computed_opponent_robot_position:
                for enemy_position in start_enemy_positions:
                    t_hl.send_robot_position(*enemy_position)
    # endregion

    logger.info("Le match vient de commencer")

    # region # match
    while not t_hl.has_match_stopped():
        # region # measures
        one_turn_points = dacl.filter_points(t_lidar.get_measures(),
                                             THRESHOLD_QUALITY)
        cartesian_one_turn_measure = outr.one_turn_to_cartesian_points(
            one_turn_points)
        clusters = clus.clusterize(cartesian_one_turn_measure)
        # endregion

        # region # retrieves position from encoders
        proprioceptive_position = datr.from_encoder_position_to_lidar_measure(
            *t_ll.get_measures())
        # endregion

        # region # estimations of positions
        robots = eloc.find_robots(clusters)
        # endregion

        # region history management
        # Keep a sliding window of the last 3 cluster/robot snapshots.
        previous_clusters.append(clusters.copy())
        if len(previous_clusters) > 3:
            previous_clusters.pop(0)

        previous_opponent_robots.append(robots.copy())
        if len(previous_opponent_robots) > 3:
            previous_opponent_robots.pop(0)
        # endregion

        time.sleep(1)
    # endregion
    logger.info("Le match est fini")
    # region # end of match

    t_lidar.close_connection()
    t_hl.close_connection()
    t_ll.close_connection()
    time.sleep(3)
    logger.info("On a fermé les connexions")
Esempio n. 7
0
def main():
    """Run the match script with lidar-based recalibration: start the
    communication threads, determine the team colour before the match,
    then — whenever the high level requests a shift — estimate the robot's
    pose from the lidar beacons, compare it with odometry, and send back
    the median pose correction."""
    logger = logging.getLogger(log_filename)
    logger.info("On a lancé le script de match")

    # region # thread initialisation
    # Lidar acquisition, wheel-encoder acquisition and high-level
    # communication each run in their own thread.
    t_lidar = datr.LidarThread(log_filename)
    t_lidar.start()

    t_ll = datr.EncoderThread(log_filename)
    t_ll.start()

    t_hl = comm.HLThread(log_filename)
    t_hl.start()

    logger.info("Fils de communication lancés")

    # endregion

    # region # variable initialisation
    own_colour_team = None
    # endregion

    # region # before the match
    logger.info("Premières mesures")
    # NOTE(review): this uses QUALITY_THRESHOLD while the loops below use
    # THRESHOLD_QUALITY — confirm both constants exist and are meant to
    # differ, or unify the naming.
    one_turn_points = dacl.filter_points(t_lidar.get_measures(),
                                         QUALITY_THRESHOLD)
    cartesian_one_turn_measure = outr.one_turn_to_cartesian_points(
        one_turn_points)
    one_turn_clusters, means, closest_points = clus.clusterize(
        cartesian_one_turn_measure)
    one_turn_clusters = clus.Cluster.to_clusters(one_turn_clusters,
                                                 closest_points)

    print("Il y a ", len(one_turn_clusters), "clusters.")

    logger.info("Match a commencé : " + str(t_hl.has_match_begun()))
    logger.info("taille clusters : " + str(len(one_turn_clusters)))

    # Busy-loop until the high level signals the match start; meanwhile
    # pick up the team colour and answer any recalibration request.
    while not t_hl.has_match_begun():
        # team colour
        if t_hl.get_team_colour():
            logger.info("La couleur : " + t_hl.get_team_colour().name)
        else:
            logger.warning("Pas de couleur")
        if t_hl.get_team_colour() is not None:
            # computes the position
            if t_hl.get_team_colour().value == TeamColor.purple.value:
                own_colour_team = TeamColor.purple
                logger.info("On est violet")

            elif t_hl.get_team_colour().value == TeamColor.orange.value:
                own_colour_team = TeamColor.orange
                logger.info("On est orange")
            if own_colour_team:
                if t_hl.expecting_shift:
                    # Collect several pose-shift estimates and send the
                    # median to be robust against outlier measures.
                    last_states = []

                    for i in range(n_measures_for_median):
                        t1 = time.time()

                        # region # measures
                        one_turn_points = dacl.filter_points(
                            t_lidar.get_measures(), THRESHOLD_QUALITY)
                        odo_measure = t_ll.get_measures()[:3]
                        # endregion

                        # region lidar data processing
                        cartesian_one_turn_measure = outr.one_turn_to_cartesian_points(
                            one_turn_points)
                        clusters, means, closest_points = clus.clusterize(
                            cartesian_one_turn_measure)
                        one_turn_clusters = clus.Cluster.to_clusters(
                            clusters, closest_points)
                        # endregion

                        t2 = time.time()
                        logger.debug(
                            "temps entre la mesure et le regroupement " +
                            str(t2 - t1))

                        # region # retrieves position from encoders

                        logger.debug("raw measure of odometry " +
                                     str(odo_measure))
                        proprioceptive_position = datr.from_encoder_position_to_lidar_measure(
                            *odo_measure)
                        logger.debug("odometry position from lidar " +
                                     str(proprioceptive_position))
                        # endregion
                        t3 = time.time()
                        logger.debug(
                            "Durée de récupération de la mesure des codeuses "
                            + str(t3 - t2))

                        # region # estimations of positions

                        # Locate the fixed beacons using the odometry pose
                        # as a prior, then derive our own pose from them.
                        beacons = sloc.find_beacons_with_odometry(
                            one_turn_clusters, proprioceptive_position,
                            own_colour_team, log_filename)
                        t4 = time.time()
                        logger.debug(
                            "durée d'estimation de la position des balises à partir de l'odométrie "
                            + str(t4 - t3))
                        estimated_position, estimated_orientation = sloc.compute_own_state(
                            beacons, own_colour_team, log_filename)
                        t5 = time.time()
                        logger.debug(
                            "Durée d'estimation de l'état du robot grâce au LiDAR"
                            + str(t5 - t4))
                        if estimated_position is None or estimated_orientation is None:
                            logger.warning(
                                "On n'a pas pu trouver les balises nécessaires à la localisation du robot."
                            )
                        else:
                            logger.info("Notre position corrigée : " +
                                        str(estimated_position))
                            logger.info("Notre orientation corrigée : " +
                                        str(estimated_orientation))
                            hl_own_state = np.array([
                                estimated_position.x, estimated_position.y,
                                estimated_orientation
                            ])
                            logger.debug("lidar : " + str(hl_own_state))
                            logger.debug("odo : " +
                                         str(proprioceptive_position))
                            logger.debug("décalage : " +
                                         str(hl_own_state -
                                             proprioceptive_position))
                            # Store the lidar-minus-odometry shift.
                            last_states.append(hl_own_state -
                                               proprioceptive_position)
                        t6 = time.time()
                        logger.debug(
                            "temps entre la mesure lidar et le retour " +
                            str(t6 - t1))
                        time.sleep(0.1)
                    # NOTE(review): here every one of the measures must have
                    # succeeded (== 3, presumably n_measures_for_median), but
                    # the in-match loop below accepts any non-empty
                    # last_states (> 0) — confirm which policy is intended.
                    if len(last_states) == 3:
                        last_states = sloc.crop_angles(last_states)
                        median_shifts = sloc.choose_median_state(last_states)
                        t_hl.set_recalibration(median_shifts)
                        t_hl.send_shift()

                    else:
                        logger.debug('Aucun recalage possible.')
                        t_hl.set_recalibration(None)
                        t_hl.send_shift()

        time.sleep(0.05)
    # endregion
    logger.info("Le match vient de commencer")

    # region # match
    while not t_hl.has_match_stopped():
        if own_colour_team is None:
            logger.debug(
                'On n\'a pas reçu de couleur donc le script lidar ne fait rien'
            )
            time.sleep(1)
        else:
            if t_hl.expecting_shift:
                # Same recalibration procedure as before the match.
                last_states = []

                for i in range(n_measures_for_median):
                    t1 = time.time()

                    # region # measures
                    one_turn_points = dacl.filter_points(
                        t_lidar.get_measures(), THRESHOLD_QUALITY)
                    odo_measure = t_ll.get_measures()[:3]
                    # endregion

                    # region lidar data processing
                    cartesian_one_turn_measure = outr.one_turn_to_cartesian_points(
                        one_turn_points)
                    clusters, means, closest_points = clus.clusterize(
                        cartesian_one_turn_measure)
                    one_turn_clusters = clus.Cluster.to_clusters(
                        clusters, closest_points)
                    # endregion

                    t2 = time.time()
                    logger.debug("temps entre la mesure et le regroupement " +
                                 str(t2 - t1))

                    # region # retrieves position from encoders

                    logger.debug("raw measure of odometry " + str(odo_measure))
                    proprioceptive_position = datr.from_encoder_position_to_lidar_measure(
                        *odo_measure)
                    logger.debug("odometry position from lidar " +
                                 str(proprioceptive_position))
                    # endregion
                    t3 = time.time()
                    logger.debug(
                        "Durée de récupération de la mesure des codeuses " +
                        str(t3 - t2))

                    # region # enemy position
                    # for enemy_position in eloc.find_robots(one_turn_clusters):
                    #     t_hl.send_robot_position(*enemy_position)
                    # endregion

                    # region # estimations of positions

                    beacons = sloc.find_beacons_with_odometry(
                        one_turn_clusters, proprioceptive_position,
                        own_colour_team, log_filename)
                    t4 = time.time()
                    logger.debug(
                        "durée d'estimation de la position des balises à partir de l'odométrie "
                        + str(t4 - t3))
                    estimated_position, estimated_orientation = sloc.compute_own_state(
                        beacons, own_colour_team, log_filename)
                    t5 = time.time()
                    logger.debug(
                        "Durée d'estimation de l'état du robot grâce au LiDAR"
                        + str(t5 - t4))
                    if estimated_position is None or estimated_orientation is None:
                        logger.warning(
                            "On n'a pas pu trouver les balises nécessaires à la localisation du robot."
                        )
                    else:
                        logger.info("Notre position corrigée : " +
                                    str(estimated_position))
                        logger.info("Notre orientation corrigée : " +
                                    str(estimated_orientation))
                        hl_own_state = np.array([
                            estimated_position.x, estimated_position.y,
                            estimated_orientation
                        ])
                        logger.debug("lidar : " + str(hl_own_state))
                        logger.debug("odo : " + str(proprioceptive_position))
                        logger.debug("décalage : " +
                                     str(hl_own_state -
                                         proprioceptive_position))
                        last_states.append(hl_own_state -
                                           proprioceptive_position)
                    t6 = time.time()
                    logger.debug("temps entre la mesure lidar et le retour " +
                                 str(t6 - t1))
                    time.sleep(0.1)
                # Any successful estimate is enough during the match (the
                # pre-match loop above requires all of them — see note there).
                if len(last_states) > 0:
                    last_states = sloc.crop_angles(last_states)
                    median_shifts = sloc.choose_median_state(last_states)
                    t_hl.set_recalibration(median_shifts)
                    t_hl.send_shift()

                else:
                    logger.debug('Aucun recalage possible.')
                    t_hl.set_recalibration(None)
                    t_hl.send_shift()

                # endregion

                # endregion

        time.sleep(0.05)
    # endregion
    logger.info("Le match est fini")
    # region # end of match
    t_lidar.close_connection()
    t_hl.close_connection()
    t_ll.close_connection()
    time.sleep(3)
    logger.info("On a fermé les connexions")
Esempio n. 8
0
def display_measures_and_table():
    """Plot one recorded lidar sample together with the table geometry
    (edges and the three fixed beacons)."""
    identifier = 1

    # Candidate translations, one per recorded sample below.  # *-1
    anchor_points = [
        Point(0, -1820),
        Point(-1300, -1400),
        Point(1210, -1400),
    ]
    translation_vectors = []
    for anchor in anchor_points:
        vector = Vector()
        vector.set_coordinates(anchor.x, anchor.y)
        translation_vectors.append(vector)

    # measures retrieval
    samples = ["0_-1820_pi_over_2", "1210_1400_pi", "-1210_1400_0"]
    measures = get_table_measures(samples[identifier])

    one_turn_measures = []
    for measure in measures:
        # Quality filter (threshold 100), distance filter, then cartesian.
        kept = dacl.keep_good_measures(measure, 100)
        kept = dacl.keep_not_too_far_or_not_too_close(kept)
        one_turn_measures.append(outr.one_turn_to_cartesian_points(kept))

    # table instantiation
    table = Table()

    # The three fixed beacons, each given by its four corners
    # (top-left, top-right, bottom-right order as in Square).
    beacon_corner_sets = (
        [Point(-1500 - 100, 2000),
         Point(-1500, 2000),
         Point(-1500, 2000 - 100),
         Point(-1500 - 100, 2000 - 100)],
        [Point(1500, 1000 + 50),
         Point(1500 + 100, 1000 + 50),
         Point(1500 + 100, 1000 - 50),
         Point(1500, 1000 - 50)],
        [Point(-1500 - 100, 0 + 100),
         Point(-1500, 0 + 100),
         Point(-1500, 0),
         Point(-1500 - 100, 0)],
    )
    for corners in beacon_corner_sets:
        table.add_square_obstacle(Square(corners))

    # Table outline (3 m x 2 m).
    for corner in (Point(-1500, 0), Point(1500, 0),
                   Point(1500, 2000), Point(-1500, 2000)):
        table.add_edge_point(corner)

    table.init_plot()
    table.plot_edges()
    table.plot_lidar_measures(one_turn_measures[1])

    table.plot_obstacles()
    table.plot()