Example #1
def cal_connect_point(cur_state, env):
    # Extract robot and target positions from the latest vector observation
    # (indices 0-2 hold the target x/y/z, indices 3-5 the robot x/y/z).
    robot_loc = [-cur_state[-1][0][5], 0.5, cur_state[-1][0][3]]
    target_loc = [-cur_state[-1][0][2], 0.5, cur_state[-1][0][0]]
    dis_env = env
    # Plan on the discretized environment grid; theta() returns the sequence
    # of connect points (waypoints) from the robot to the target.
    connect_point = theta(dis_env, (robot_loc[0], robot_loc[-1]),
                          (target_loc[0], target_loc[-1]))
    return connect_point
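A minimal usage sketch for cal_connect_point, assuming theta() plans over a 2D occupancy grid loaded from a .npy file and that cur_state is a list of ML-Agents-style vector observations; the observation values and file name below are purely illustrative.

import numpy as np

env_grid = np.load('env_3.npy')  # placeholder 2D occupancy grid
# fake observation: indices 0-2 = target x/y/z, indices 3-5 = robot x/y/z
obs = np.array([[10.0, 0.5, -5.0, 12.0, 0.5, -8.0]])
waypoints = cal_connect_point([obs], env_grid)
print(waypoints)  # sequence of grid waypoints returned by theta()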
Example #2
    def cal_connect_point(self, env):
        # Query the first ML-Agents behavior for its current step data and
        # read the vector observation: indices 0-2 are the target position,
        # indices 3-5 the robot position.
        (DecisionSteps, TerminalSteps) = env.get_steps(self.behavior_names[0])
        vector_obs = DecisionSteps.obs[-1][0]
        robot_start = vector_obs[3:6]
        target_loc = vector_obs[:3]
        # Load the discretized environment map and convert both world
        # positions into grid coordinates.
        dis_env = np.load('env_3.npy')
        robot_change = np.array(change_axis(dis_env, robot_start))
        target_change = np.array(change_axis(dis_env, target_loc))
        # (Optional) clamp robot_change / target_change into the grid,
        # e.g. 0 <= index <= 39 for a 40 x 40 map.

        # Plan waypoints from the robot cell to the target cell.
        connect_point = theta(dis_env, (robot_change[0], robot_change[-1]),
                              (target_change[0], target_change[-1]))
        return connect_point
Example #3
def pred_test(env, target_loc, sensor_loc, robot_loc=None):
    # For each sensor, compute the direction it should point toward the target.
    sensor_dir = []
    for i in range(len(sensor_loc)):
        s_x, s_z = sensor_loc[i]
        # Plan a grid path from the sensor to the target.
        s_path = theta(env, (int(s_x), int(s_z)),
                       (int(target_loc[0]), int(target_loc[-1])))
        if s_path[0] == s_path[1] or len(s_path) == 2:
            # Degenerate path (sensor on the target cell) or direct line of
            # sight: aim straight at the target position.
            s_angle = angle2xy((s_x, s_z), (target_loc[0], target_loc[-1]))
        else:
            # Otherwise aim along the first segment of the planned path.
            s_angle = angle2xy(s_path[0], s_path[1])
        sensor_dir.append(s_angle)
    return sensor_dir
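A hypothetical call to pred_test, assuming env is a 2D occupancy grid and sensor_loc holds (x, z) pairs; the grid file and coordinates are placeholders.

import numpy as np

env_grid = np.load('env_3.npy')           # placeholder occupancy grid
sensors = [(5, 5), (30, 8), (12, 34)]     # illustrative sensor positions
directions = pred_test(env_grid, target_loc=(20, 20), sensor_loc=sensors)
# one angle2xy() result per sensor: toward the target if it is directly
# visible, otherwise toward the first segment of the theta() path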
Example #4
def load_label(select_env, num_sensors, image_index):
    # Sensor names ('sensor_1', 'sensor_2', ...); not used further below.
    all_sensors = []
    for i in range(num_sensors):
        all_sensors.append('sensor_{}'.format(i + 1))
    all_target_label = []

    # Environment grid, sensor locations, and the target locations for the
    # requested image indices.
    env_map = np.load('training/env_{}.npy'.format(select_env))
    sen_loc = np.load('training/env_{}_sensor.npy'.format(select_env))
    tar_loc = read_target('training/env_{}/target_loc.txt'.format(select_env),
                          image_index)

    for i in range(len(image_index)):
        target_label = []
        for j in range(num_sensors):
            sensor_dir = []
            s_x, s_z = sen_loc[0][j], sen_loc[1][j]
            # Path from sensor j to the i-th target; the angle of its first
            # segment becomes the label for that sensor.
            s_path = theta(
                env_map, (s_x, s_z),
                (math.ceil(tar_loc[i][0]), math.ceil(tar_loc[i][1])))
            s_angle = angle2xy(s_path[0], s_path[1])
            sensor_dir.append(s_angle)
            target_label.append(sensor_dir)
        all_target_label.append(target_label)
    return all_target_label
Example #5
def load_data(num_iter,
              select_env,
              num_sensors,
              sen_locs,
              batch_size=data_per_epoch):
    target_loc = []
    robot_loc = []

    # Take the first batch_size cases of this epoch (random sampling is left
    # commented out), then load target / robot positions and the environment map.
    select_case = np.arange(batch_size)
    #select_case = [np.random.randint(500) for _ in range(batch_size)]
    tar_loc = read_target(
        'train_data1101/epoch_{}_target.txt'.format(num_iter), select_case)
    rob_loc = read_target('train_data1101/epoch_{}_robot.txt'.format(num_iter),
                          select_case)
    dis_env = np.load('train_data1101/env_map/env_{}.npy'.format(select_env +
                                                                 1))
    target_loc.append(tar_loc)
    robot_loc.append(rob_loc)

    target_label = []
    batch_input = []
    # Image file names are shared across sensors/views; sort them numerically.
    filePath = 'training/sensor_1/1'
    filelist = os.listdir(filePath)
    filelist.sort(key=lambda x: int(x[:-4]))
    for s_i in range(len(select_case)):
        sensor_dir = []
        # One 84 x (84*4) RGB strip per sensor, holding its four camera views.
        image_input = np.zeros((num_sensors, 84, 84 * 4, 3))
        for j in range(num_sensors):
            s_x, s_z = sen_locs[j]
            # Label: direction from sensor j toward the target (or toward the
            # first segment of the planned path when there is no direct view).
            s_path = theta(dis_env, (round(s_x), round(s_z)),
                           (round(-tar_loc[s_i][1]), round(tar_loc[s_i][0])))
            if s_path[0] == s_path[1] or len(s_path) == 2:
                s_angle = angle2xy((s_x, s_z),
                                   (tar_loc[s_i][0], tar_loc[s_i][1]))
            else:
                s_angle = angle2xy(s_path[0], s_path[1])
            sensor_dir.append(s_angle)

            # Input: the four views of sensor j, normalized to [0, 1] and laid
            # out right-to-left in the strip (view 1 -> columns 252:336, ...,
            # view 4 -> columns 0:84).
            sensor_path = 'training/sensor_' + str(j + 1)
            for v in range(4):
                img = image.load_img(
                    sensor_path + '/{}/'.format(v + 1) +
                    filelist[select_case[s_i]],
                    target_size=(84, 84))  # height-width
                image_input[j, :, 84 * (3 - v):84 * (4 - v), :] = \
                    image.img_to_array(img) / 255

        target_label.append(sensor_dir)
        batch_input.append(image_input)

    return batch_input, target_label
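The loader returns plain Python lists; a typical next step is to stack them into arrays before feeding a model. The shapes below follow load_data itself, except the label shape, which depends on what angle2xy() returns; the call arguments and sensor positions are placeholders.

import numpy as np

sen_locs = [(5, 5), (30, 8), (12, 34), (20, 35)]  # illustrative (x, z) positions
batch_input, target_label = load_data(num_iter=1, select_env=0,
                                      num_sensors=4, sen_locs=sen_locs)
x = np.asarray(batch_input)    # shape (batch_size, num_sensors, 84, 336, 3)
y = np.asarray(target_label)   # one angle2xy() entry per (case, sensor)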
Example #6
# Run 100 test episodes and record the actions, robot locations, target
# locations, and done flags for each.
all_action, all_locs, all_target, all_done = [], [], [], []
for i in range(100):
    z_action, z_locs, z_target, z_done = agent.test3(map_path='env_')
    #z_action, z_locs, z_target, z_done = agent.test_large(move_action=move_action)
    all_action.append(z_action)
    all_locs.append(z_locs)
    all_target.append(z_target)
    all_done.append(z_done)

succ_rate = []
all_dis, all_dis_robot = [], []
for j in range(len(all_done)):
    if all_done[j] == 1:
        succ_rate.append(1)
    # Shortest-path length from the start location to the target, summed over
    # the segments of the theta() path.
    true_path = theta(env_map,
                      (round(all_locs[j][0][0]), round(all_locs[j][0][-1])),
                      (round(all_target[j][0]), round(all_target[j][-1])))
    true_dis = 0
    for k in range(len(true_path) - 1):
        true_dis = true_dis + np.sqrt(
            (true_path[k + 1][0] - true_path[k][0])**2 +
            (true_path[k + 1][-1] - true_path[k][-1])**2)
    # Distance actually travelled by the robot, plus the final segment from
    # its last recorded location to the target.
    robot_dis = 0
    for k in range(len(all_locs[j]) - 1):
        robot_dis = robot_dis + np.sqrt(
            (all_locs[j][k + 1][0] - all_locs[j][k][0])**2 +
            (all_locs[j][k + 1][-1] - all_locs[j][k][-1])**2)

    robot_dis = robot_dis + np.sqrt(
        (all_locs[j][k + 1][0] - all_target[j][0])**2 +
        (all_locs[j][k + 1][-1] - all_target[j][-1])**2)
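The two distance loops above repeat the same Euclidean chain-length computation; a small helper such as the sketch below (the name path_length is illustrative, not the project's) expresses it once.

import numpy as np

def path_length(points):
    # Sum of Euclidean distances between consecutive points, using the first
    # and last coordinate exactly as the loops above do.
    total = 0.0
    for k in range(len(points) - 1):
        total += np.sqrt((points[k + 1][0] - points[k][0])**2 +
                         (points[k + 1][-1] - points[k][-1])**2)
    return total

# e.g. true_dis = path_length(true_path)
#      robot_dis = path_length(all_locs[j]) + final segment to the target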
Example #7
# For every selected case, parse the target position from its label line and
# check which sensors have a direct line of sight to it.
all_sensor_view = []
for i in range(len(lines)):
    # Each label line has the form '(x, y, z)label'; pull out the label and
    # the x / z coordinates by locating the parentheses and commas.
    label_index = lines[select_case[i]].index(')')
    label_target = int(lines[select_case[i]][label_index + 1:-1])
    x_index_1 = lines[select_case[i]].index('(')
    x_index_2 = lines[select_case[i]].index(',')
    label_x = float(lines[select_case[i]][x_index_1 + 1:x_index_2])
    z_index_1 = lines[select_case[i]].index(',', x_index_2 + 1)
    z_index_2 = lines[select_case[i]].index(')')
    label_z = float(lines[select_case[i]][z_index_1 + 2:z_index_2])
    #t_x, t_y, t_z = change_axis(env, (label_x, 0, label_z))
    t_x, t_y, t_z = (label_x, 0, label_z)
    sensor_direction = []
    is_view = []
    all_view = []
    for j in range(num_sensors):
        #s_x, s_y, s_z = change_axis(env, sensor_loc[j])
        s_x, s_y, s_z = sensor_loc[j]
        # A theta() path with exactly two points means the sensor sees the
        # target directly; anything longer means the view is blocked.
        #s_path = AStarSearch(env, (round(s_x), round(s_z)), (round(t_x), round(t_z)))
        s_path = theta(env, (round(s_x), round(s_z)), (round(t_x), round(t_z)))
        if len(s_path) == 2:
            is_view.append(1)
        else:
            is_view.append(0)
        if s_path[0] == s_path[1]:
            # Degenerate path: the target sits on the sensor's grid cell.
            print('num_group:', select_group, '   num_case:', select_case[i],
                  '   num_sensor:', j)
            print('target_loc:', (label_x, 0, label_z))
    all_sensor_view.append(is_view)
np.save('is_view_env4_{}'.format(select_group), np.array(all_sensor_view))
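The manual index() arithmetic above can also be written with a regular expression; the sketch below assumes each label line looks like '(x, y, z)label', which is what the slicing implies, and parses an illustrative line.

import re

line = '(12.5, 0.0, -3.75)2\n'
m = re.match(r'\(([-\d.]+),\s*([-\d.]+),\s*([-\d.]+)\)\s*(-?\d+)', line)
label_x, label_y, label_z = (float(m.group(k)) for k in (1, 2, 3))
label_target = int(m.group(4))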
Example #8
    def cal_angle2(self, cur_state, env):
        # Return a unit direction vector pointing the robot toward the
        # farthest connect point of the original path it can still see.
        cur_loc = (round(-cur_state[-1][0][5]), round(cur_state[-1][0][3]))
        target_loc = (round(-cur_state[-1][0][2]), round(cur_state[-1][0][0]))
        is_view = []
        for connect_p in self.origin_connect_point[1:]:
            # Rasterize the segment from cur_loc to connect_p into grid cells
            # (past_grid_int), stepping along the longer axis.
            past_grid = []
            past_grid_int = []
            x_flag = 1
            y_flag = 1
            f_x = int
            f_y = int
            if (cur_loc[1] - connect_p[1]) >= 0:
                y_flag = -1
                f_y = np.ceil
            if (cur_loc[0] - connect_p[0]) >= 0:
                x_flag = -1
                f_x = np.ceil
            if abs(connect_p[0] - cur_loc[0]) <= abs(connect_p[1] -
                                                     cur_loc[1]):
                # Step one cell at a time along y; x advances by the slope.
                slocp = abs((connect_p[0] - cur_loc[0]) /
                            (connect_p[1] - cur_loc[1] + 1e-10))
                for j in range(int(abs(cur_loc[1] - connect_p[1]))):
                    cur_x, cur_y = (cur_loc[0] + j * slocp * x_flag,
                                    cur_loc[1] + j * y_flag)
                    fur_x, fur_y = (cur_loc[0] + (j + 1) * slocp * x_flag,
                                    cur_loc[1] + (j + 1) * y_flag)
                    past_grid.append((cur_x, cur_y))
                    # If the step crosses a cell boundary in x, record both cells.
                    if f_x(fur_x) != f_x(cur_x) and fur_x != f_x(fur_x):
                        past_grid_int.append((f_x(cur_x), f_y(cur_y)))
                        past_grid_int.append((f_x(fur_x), f_y(cur_y)))
                    else:
                        past_grid_int.append((f_x(cur_x), f_y(cur_y)))
            else:
                # Step one cell at a time along x; y advances by the slope.
                slocp = abs((connect_p[1] - cur_loc[1]) /
                            (connect_p[0] - cur_loc[0] + 1e-10))
                for j in range(int(abs(cur_loc[0] - connect_p[0]))):
                    cur_x, cur_y = (cur_loc[0] + j * x_flag,
                                    cur_loc[1] + slocp * j * y_flag)
                    fur_x, fur_y = (cur_loc[0] + (j + 1) * x_flag,
                                    cur_loc[1] + slocp * (j + 1) * y_flag)
                    past_grid.append((cur_x, cur_y))
                    # If the step crosses a cell boundary in y, record both cells.
                    if f_y(fur_y) != f_y(cur_y) and fur_y != f_y(fur_y):
                        past_grid_int.append((f_x(cur_x), f_y(cur_y)))
                        past_grid_int.append((f_x(cur_x), f_y(fur_y)))
                    else:
                        past_grid_int.append((f_x(cur_x), f_y(cur_y)))
            past_grid_int.append(connect_p)
            # Walk the rasterized cells; if none of them hits an obstacle
            # (nonzero env value), the connect point is visible from cur_loc.
            for c_i, cur_grid in enumerate(past_grid_int):
                # Clamp indices at the lower grid boundary.
                if cur_grid[0] <= 0:
                    cur_grid = (0, cur_grid[1])
                if cur_grid[1] <= 0:
                    cur_grid = (cur_grid[0], 0)
                if env[int(cur_grid[0]), int(cur_grid[1])] == 0:
                    pass
                else:
                    break

                if c_i == len(past_grid_int) - 1:
                    is_view.append(connect_p)
        if len(is_view) == 0:
            # No connect point is visible: replan from the current location
            # and head toward the first new waypoint.
            new_connect_point = theta(env, (cur_loc[0], cur_loc[-1]),
                                      (target_loc[0], target_loc[-1]))
            is_view.append(new_connect_point[1])
        # Normalize the vector from the robot to the last visible connect point.
        robot2cp = np.sqrt((cur_loc[0] - is_view[-1][0])**2 +
                           (cur_loc[-1] - is_view[-1][1])**2)
        robot2cp_angle = (is_view[-1][0] - cur_loc[0],
                          is_view[-1][1] - cur_loc[1])
        p_new = np.zeros((2, 1))
        p_new[0] = np.true_divide(
            robot2cp_angle[0],
            np.sqrt(robot2cp_angle[0]**2 + robot2cp_angle[1]**2) +
            ZERO_TOLERANCE)
        p_new[1] = np.true_divide(
            robot2cp_angle[1],
            np.sqrt(robot2cp_angle[0]**2 + robot2cp_angle[1]**2) +
            ZERO_TOLERANCE)
        return p_new
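The hand-rolled rasterization in cal_angle2 can be approximated by simply sampling the segment at sub-cell resolution. The sketch below is a rougher alternative, not the method used above; it assumes env is a 2D array in which nonzero cells are obstacles, and the function name is illustrative.

import numpy as np

def has_line_of_sight(env, start, end, samples_per_cell=4):
    # Sample points along the segment and report False if any sampled point
    # falls in an obstacle cell.
    n = int(max(abs(end[0] - start[0]), abs(end[1] - start[1])) *
            samples_per_cell) + 1
    for t in np.linspace(0.0, 1.0, n):
        x = start[0] + t * (end[0] - start[0])
        y = start[1] + t * (end[1] - start[1])
        if env[int(round(x)), int(round(y))] != 0:
            return False
    return True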