Example #1
def mdr(paths_file: str, adv_agent_id: int, adv_agent_ds: int, out_file_name: str = None, robust_mode: str = 'DISABLE'):
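    """Mark one agent as adversarial, run the Max Damage Route (MDR) finder on the
    serialized paths in `paths_file`, dump the resulting paths, and return
    (info, original_makespan, mdr_makespan)."""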
    logging.getLogger('vgmapf.problems.mapf.multi_agent_pathfinding').setLevel(logging.INFO)
    paths_file = pathlib.Path(paths_file)
    lp = paths_serializer.load(paths_file)
    adv_agent = [a for a in lp.agents if a.id == adv_agent_id][0]
    adv_agent.is_adversarial = True
    adv_agent.damage_steps = adv_agent_ds

    robust_mode = getattr(mdr_finder.RobustPathMode, robust_mode)

    if not out_file_name:
        out_file_name = paths_file.parent / (
                paths_file.stem + f'-mdr-a_{adv_agent_id}-ds_{adv_agent_ds}' + paths_file.suffix)

    paths = {a.id: a.path for a in lp.agents}
    original_makespan = mdr_finder.get_makespan(paths, lp.agents)

    mdrf = mdr_finder.MaxDamageRouteFinder(lp.grid, lp.agents, astar.Searcher, lambda agnt: dict(
        h_func=mapf_heuristics.get_good_manhatten_like_heuristic(agnt)), robust_mode)
    goal_state, info = mdrf.find()
    paths = goal_state.paths

    for aid, p in paths.items():
        p_cells = [x.cell for x in p]
        LOG.info(f'Agent [{aid:02d}], path length: {len(p)}')
        print(lp.grid.to_str(p_cells, p_cells[0], p_cells[-1], path_chr=str(aid)[0]))

    mdr_makespan = mdr_finder.get_makespan(paths, lp.agents)
    paths_serializer.dump(out_file_name, lp.agents, [paths[a.id] for a in lp.agents], lp.grid)

    LOG.info(f'Original makespan: {original_makespan} | MDR makespan: {mdr_makespan} | MDR info: {info}')

    return info, original_makespan, mdr_makespan
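
A minimal usage sketch for the function above, assuming a paths file produced by paths_serializer.dump; the file name and agent values are placeholders, and robust_mode must name a member of mdr_finder.RobustPathMode (the signature's default is 'DISABLE'):

# Hypothetical invocation; 'demo_paths.json' and the agent values are placeholders.
info, original_makespan, mdr_makespan = mdr(
    'demo_paths.json',      # serialized paths file to load
    adv_agent_id=1,         # agent to mark as adversarial
    adv_agent_ds=2,         # its damage-step budget
    robust_mode='DISABLE',  # must match an mdr_finder.RobustPathMode member name
)
print(f'makespan: {original_makespan} -> {mdr_makespan}')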
Example #2
    def create_agents_routes(self):
        self.all_agents = []

        lp = paths_serializer.load(self.routes_file)
        for curr_route, agnt in zip(lp.paths, lp.agents):
            self.max_steps = max(self.max_steps, len(curr_route))
            self.all_agents.append(Agent(agnt.id, curr_route, agnt.is_adversarial))
Example #3
def _stage_25_kamikaze(kamikaze_on_robust_paths_dir, rc, p):
    LOG.info(f'STARTED kamikaze on {p}')
    parts = p.stem.split('-')
    org_path_name = parts[1]
    adv_agent_id = int(parts[2].split('_')[1])
    adv_agent_ds = int(parts[3].split('_')[1])
    robust_radius = int(parts[4].split('_')[1])

    lp = paths_serializer.load(p)

    result_row = _stage_25_Result(
        rc.map_file_name,
        p.name,
        adv_agent_id,
        adv_agent_ds,
        robust_radius
    )

    for robust_mode in [RobustPathMode.OFFLINE]:
        kamikaze_route_path = kamikaze_on_robust_paths_dir / f'kamikaze-{robust_mode.name}-{org_path_name}-agent_{adv_agent_id}-ds_{adv_agent_ds}-rr_{robust_radius}{p.suffix}'
        kamikaze_paths = None
        try:
            with benchmark_utils.time_it(f'Running kamikaze with robust_mode={robust_mode}') as ti:
                kp = kamikaze_planner.KamikazePlanner(
                    lp.grid,
                    lp.agents,
                    astar.Searcher,
                    lambda agnt: dict(
                        h_func=mapf_heuristics.get_good_manhatten_like_heuristic(agnt)
                    ),
                    robust_mode=robust_mode,
                    robust_radius=robust_radius
                )
                goal_state, plan_metadata = kp.find()

            assert isinstance(goal_state, kamikaze_planner.KamikazeState)
            kamikaze_paths = goal_state.paths
            result_row.is_kamikaze_successful = True
            result_row.kamikaze_cost = goal_state.g()
            result_row.kamikaze_expanded_nodes = plan_metadata.expanded_states
            result_row.kamikaze_visited_nodes = plan_metadata.visited_states
            result_row.kamikaze_run_time_seconds = ti.getElapsed()

            collision = goal_state.get_collision()
            result_row.collision_target_agent_id = collision.target_agent_id
            result_row.collision_step = collision.step
            result_row.collision_cell = str(collision.cell)
        except kamikaze_planner.NotFoundError:
            result_row.is_kamikaze_successful = False
        except Exception as e:
            LOG.error(e, exc_info=True)
            result_row.comment = str(e)

        paths_serializer.dump(kamikaze_route_path, lp.agents, [kamikaze_paths[a.id] for a in lp.agents] if kamikaze_paths else None,
                              lp.grid, metadata=vars(result_row))

    LOG.info(f'FINISHED kamikaze on {p}')

    return result_row
Example #4
def _stage_3_normal_mdr(mdr_on_normal_paths_dir, adv_agent_id, adv_agent_ds, rc, p):
    LOG.info(f'STARTED MDR on {p} with agent [{adv_agent_id}] and DS={adv_agent_ds}')
    lp = paths_serializer.load(p)
    adv_agent = [a for a in lp.agents if a.id == adv_agent_id][0]

    adv_agent.is_adversarial = True
    adv_agent.damage_steps = adv_agent_ds
    for a in lp.agents:
        if a is not adv_agent:
            a.is_adversarial = False
            a.damage_steps = 0

    paths = {a.id: a.path for a in lp.agents}
    original_makespan = mdr_finder.get_makespan(paths, lp.agents)
    mdr_route_path = mdr_on_normal_paths_dir / f'mdr-{p.stem}-agent_{adv_agent.id}-ds_{adv_agent_ds}{p.suffix}'
    mdr_paths = None
    try:
        with benchmark_utils.time_it(f'Running MDR for adv agent:{adv_agent.id}, ds: {adv_agent_ds}') as ti:
            mdrf = mdr_finder.MaxDamageRouteFinder(lp.grid, lp.agents, astar.Searcher, lambda agnt: dict(
                h_func=mapf_heuristics.get_good_manhatten_like_heuristic(agnt)))
            goal_state, mdr_run_info = mdrf.find()
            mdr_paths = goal_state.paths

        mdr_run_time = ti.getElapsed()
        mdr_makespan = mdr_finder.get_makespan(mdr_paths, lp.agents)

        result = _stage_3_Result(
            rc.map_file_name,
            p.name,
            adv_agent.id,
            adv_agent_ds,
            original_makespan,
            mdr_makespan,
            mdr_run_info.expanded_states,
            mdr_run_info.visited_states,
            mdr_run_time,
        )
    except Exception as e:
        LOG.error(e)
        result = _stage_3_Result(
            rc.map_file_name,
            p.name,
            adv_agent.id,
            adv_agent_ds,
            original_makespan,
            comment=str(e)
        )

    paths_serializer.dump(mdr_route_path, lp.agents, [mdr_paths[a.id] for a in lp.agents] if mdr_paths else None, lp.grid,
                          metadata=vars(result))
    return result
Example #5
def e2e_parallel(run_config_file_name: str, out_dir: str = None, random_seed=None, permutations=None,
                 map_file_name: str = None, max_adv_agent_ds: int = None):
    logging.getLogger('vgmapf.problems.mapf.multi_agent_pathfinding').setLevel(logging.INFO)

    cores_count = multiprocessing.cpu_count()

    LOG.info(f'Detected {cores_count} cores!')

    print(run_config_file_name)
    rc1 = config.load(run_config_file_name)
    if permutations is not None:
        rc1.permutations = permutations
    if map_file_name:
        rc1.map_file_name = map_file_name
    random.seed(random_seed)

    # Fall back to the configured damage-step bound when no explicit cap is given.
    if max_adv_agent_ds is None:
        max_adv_agent_ds = rc1.robust_route

    if out_dir is None:
        out_dir = pathlib.Path(__file__).parent / 'outputs' / time_utils.get_current_time_stamp()
    else:
        out_dir = pathlib.Path(out_dir)

    out_dir.mkdir(parents=True, exist_ok=True)
    out_dir1 = out_dir

    grid = grid2d.Grid2D.from_file(pathlib.Path(rc1.map_file_name))
    _update_start_and_goal_cells(rc1, grid)

    print(grid.to_str(start=rc1.start, end=rc1.end))

    start_time = timeit.default_timer()

    max_agents = len(rc1.agents)
    for swarm_amount in range(2, max_agents + 1):
        rc = rc1
        # Make folders for number of agents
        swarm_out_dir = out_dir1 / str(len(rc.agents))
        swarm_out_dir.mkdir(parents=True, exist_ok=True)
        out_dir = swarm_out_dir


        # Stage 1 - build normal paths
        LOG.info('START 01 - building normal paths')

        normal_paths_dir = out_dir / STAGE_01_NORMAL_PATHS

        normal_paths_dir.mkdir(parents=True, exist_ok=True)

        tasks = [(normal_paths_dir, grid, rc, permutation_idx) for permutation_idx in range(rc.permutations)]

        with multiprocessing.Pool(processes=cores_count) as pool:
            pool.starmap(_stage_1_normal_paths, tasks)

        LOG.info('FINISH 01 - building normal paths')

        LOG.info('STARTED 02 - run Robust Routes on normal paths')

        # Stage 2 - robust routes

        robust_paths_dir = out_dir / STAGE_02_ROBUST_PATHS
        robust_paths_dir.mkdir(parents=True, exist_ok=True)

        tasks = [(robust_paths_dir, grid, max_adv_agent_ds, p) for p in normal_paths_dir.iterdir()]

        with multiprocessing.Pool(processes=cores_count) as pool:
            pool.starmap(_stage_2_normal_robust, tasks)

        LOG.info('FINISHED 02 - run Robust Routes on normal paths')


        # #Stage 25 - run kamikaze on robust routes
        # LOG.info('STARTED 025 - run kamikaze on robust routes')
        #
        # kamikaze_on_robust_paths_dir = out_dir / STAGE_025_KAMIKAZE
        # kamikaze_on_robust_paths_dir.mkdir(parents=True, exist_ok=True)
        #
        # kamikaze_on_robust_results_summary = kamikaze_on_robust_paths_dir / '025-kamikaze_on_robust_paths.csv'
        #
        # # noinspection DuplicatedCode
        # tasks = [(kamikaze_on_robust_paths_dir, rc, p) for p in robust_paths_dir.iterdir()]
        # with multiprocessing.Pool(processes=cores_count) as pool:
        #     results = pool.starmap(_stage_25_kamikaze, tasks)
        #
        # if results:
        #     with kamikaze_on_robust_results_summary.open('w', newline='') as fresults:
        #         out_csv = csv.DictWriter(fresults, vars(results[0]).keys())
        #         out_csv.writeheader()
        #
        #         for row in results:
        #             try:
        #                 out_csv.writerow(vars(row))
        #             except Exception:
        #                 LOG.warning(f'Failed writing row: {row}', exc_info=True)
        #
        #         fresults.flush()
        #
        # LOG.info('FINISHED 025 - run kamikaze on robust routes')

        # Stage 3 - run MDR on normal paths

        LOG.info('STARTED 03 - run MDR on normal paths')

        mdr_on_normal_paths_dir = out_dir / STAGE_03_MDR_NORMAL_PATHS
        mdr_on_normal_paths_dir.mkdir(parents=True, exist_ok=True)

        mdr_on_normal_results_summary = mdr_on_normal_paths_dir / '03-mdr_on_normal_paths-results.csv'

        tasks = [
            (mdr_on_normal_paths_dir, adv_agent.id, adv_agent_ds, rc, p)
            for p in normal_paths_dir.iterdir()
            for adv_agent in paths_serializer.load(p).agents
            for adv_agent_ds in range(1, max_adv_agent_ds + 1)
        ]

        LOG.debug('stage_3 tasks:\n\t' + '\n\t'.join(str(x) for x in tasks))
        with multiprocessing.Pool(processes=cores_count) as pool:
            results = pool.starmap(_stage_3_normal_mdr, tasks)

        if results:
            with mdr_on_normal_results_summary.open('w', newline='') as fresults:
                out_csv = csv.DictWriter(fresults, vars(results[0]).keys())
                out_csv.writeheader()

                for row in results:
                    try:
                        out_csv.writerow(vars(row))
                    except Exception:
                        LOG.warning(f'Failed writing row: {row}', exc_info=True)

        LOG.info('FINISHED 03 - run MDR on normal paths')

        LOG.info('STARTED 04 - run MDR on robust paths')

        # Stage 4 - MDR on robust paths

        mdr_on_robust_paths_dir = out_dir / STAGE_04_MDR_ROBUST_PATHS
        mdr_on_robust_paths_dir.mkdir(parents=True, exist_ok=True)

        mdr_on_robust_results_summary = mdr_on_robust_paths_dir / '04-mdr_on_robust_paths-results.csv'

        tasks = [(mdr_on_robust_paths_dir, rc, p) for p in robust_paths_dir.iterdir()]
        with multiprocessing.Pool(processes=cores_count) as pool:
            results = pool.starmap(_stage_4_robust_mdr, tasks)

        if results:
            with mdr_on_robust_results_summary.open('w', newline='') as fresults:
                out_csv = csv.DictWriter(fresults, vars(results[0]).keys())
                out_csv.writeheader()

                for row in results:
                    try:
                        out_csv.writerow(vars(row))
                    except Exception:
                        LOG.warning(f'Failed writing row: {row}', exc_info=True)

                fresults.flush()

        end_time = timeit.default_timer()
        LOG.info(
            f'FINISHED 04 - run MDR on robust paths, elapsed: {end_time - start_time:.2f}s = {datetime.timedelta(seconds=end_time - start_time)}')
        del rc1.agents[-1]
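
A hedged sketch of driving the pipeline above; the config path, output directory, and seed values are placeholders:

# Hypothetical call; 'run-config.yml' and 'outputs/demo' are placeholder paths.
e2e_parallel(
    'run-config.yml',        # run configuration loaded via config.load
    out_dir='outputs/demo',  # per-swarm-size stage folders are created under this directory
    random_seed=42,
    permutations=5,          # overrides rc1.permutations when given
)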
Example #6
def _stage_4_robust_mdr(mdr_on_robust_paths_dir, rc, p):
    # TODO: only calculate robust MDR paths for paths where MDR had a big impact on the original path

    # robust_path-{p.stem}-agent_{adv_agent.id}-ds_{adv_agent_ds}{p.suffix}
    LOG.info(f'STARTED robust MDR on {p}')
    parts = p.stem.split('-')
    org_path_name = parts[1]
    adv_agent_id = int(parts[2].split('_')[1])
    adv_agent_ds = int(parts[3].split('_')[1])
    robust_radius = int(parts[4].split('_')[1])

    org_path_file_path = p.parent.parent / STAGE_01_NORMAL_PATHS / (org_path_name + p.suffix)
    org_lp = paths_serializer.load(org_path_file_path)
    org_makespan = org_lp.get_makespan()

    lp = paths_serializer.load(p)

    mdr_info = {}
    paths = {a.id: a.path for a in lp.agents}
    original_robust_makespan = mdr_finder.get_makespan(paths, lp.agents)

    result_row = _stage_4_Result(
        rc.map_file_name,
        p.name,
        adv_agent_id,
        adv_agent_ds,
        org_makespan,
        original_robust_makespan,
        robust_radius
    )

    for robust_mode in [RobustPathMode.ONLINE_CONST]:
        mdr_route_path = mdr_on_robust_paths_dir / f'mdr-{robust_mode.name}-{org_path_name}-agent_{adv_agent_id}-ds_{adv_agent_ds}-rr_{robust_radius}{p.suffix}'
        mdr_paths = None
        try:
            with benchmark_utils.time_it(f'Running MDR with robust_mode={robust_mode}') as ti:
                mdrf = mdr_finder.MaxDamageRouteFinder(
                    lp.grid,
                    lp.agents,
                    astar.Searcher,
                    lambda agnt: dict(
                        h_func=mapf_heuristics.get_good_manhatten_like_heuristic(agnt)
                    ),
                    robust_mode=robust_mode,
                    robust_radius=robust_radius
                )
                goal_state, mdr_run_info = mdrf.find()
                mdr_paths = goal_state.paths

            mdr_results = {
                f'ms_mdr_{robust_mode.name.lower()}_path': mdr_finder.get_makespan(mdr_paths, lp.agents),
                f'mdr_{robust_mode.name.lower()}_expanded_nodes': mdr_run_info.expanded_states,
                f'mdr_{robust_mode.name.lower()}_visited_nodes': mdr_run_info.visited_states,
                f'mdr_{robust_mode.name.lower()}_run_time_seconds': ti.getElapsed()
            }
            for k, v in mdr_results.items():
                setattr(result_row, k, v)
        except Exception as e:
            LOG.error(e, exc_info=True)
            result_row.comment = str(e)

        paths_serializer.dump(mdr_route_path, lp.agents, [mdr_paths[a.id] for a in lp.agents] if mdr_paths else None, lp.grid, metadata=vars(result_row))

    LOG.info(f'FINISHED robust MDR on {p}')

    return result_row
Example #7
def _stage_2_normal_robust(robust_paths_dir, grid, max_adv_agent_ds, p):
    try:
        LOG.info(f'STARTED normal robust on {p}')
        lp = paths_serializer.load(p)
        for org_adv_agent in lp.agents:
            adv_agent_id = org_adv_agent.id
            agents = [a.clone(clear_path=False) for a in lp.agents]

            adv_agent = [a for a in agents if a.id == adv_agent_id][0]

            LOG.info(f'STARTED Robust paths with agent {adv_agent.id}')
            for adv_agent_ds in range(1, max_adv_agent_ds + 1):
                for robust_radius in range(1, 2*adv_agent_ds+1):
                    LOG.info(f'STARTED Robust paths with agent {adv_agent.id} and DS={adv_agent_ds} '
                             f'and Robust Radius={robust_radius}')
                    adv_agent.is_adversarial = True
                    adv_agent.damage_steps = adv_agent_ds

                    for a in agents:
                        if a is not adv_agent:
                            a.is_adversarial = False
                            a.damage_steps = 0
                            a.path = None
                    try:
                        with benchmark_utils.time_it() as t:
                            mf = multi_agent_pathfinding.MapfFinder(grid, agents,
                                                                    adv_agent_radiuses={adv_agent.id: robust_radius})
                            mf.find_paths(astar.Searcher,
                                          lambda agnt: dict(
                                              h_func=mapf_heuristics.get_good_manhatten_like_heuristic(agnt)))
                        mf.validate_paths()

                        out_path = robust_paths_dir / f'robust_path-{p.stem}-agent_{adv_agent.id}-ds_{adv_agent_ds}' \
                                                      f'-rr_{robust_radius}{p.suffix}'
                        mf.save_paths(out_path, metadata=dict(
                            mapf_run_time_sec=t.getElapsed(),
                            makespan=mf.agents_repo.get_makespan(only_non_adversarial=False),
                            adv_radiuses=mf.adv_agent_radiuses,
                            agents=[
                                dict(
                                    id=a.id,
                                    start_cell=a.start_cell,
                                    goal_cell=a.goal_cell,
                                    path_cost=a.path_cost,
                                    path_expanded_nodes=a.expanded_nodes,
                                    motion_equation=a.motion_equation.name,
                                    start_policy=a.start_policy.name,
                                    goal_policy=a.goal_policy.name,
                                    is_adversarial=a.is_adversarial,
                                    damage_steps=a.damage_steps,
                                )
                                for a in agents
                            ]
                        ))
                    except Exception as e:
                        LOG.error(
                            f'Failed creating robust route for {org_adv_agent}, ds={adv_agent_ds}, rr={robust_radius}:'
                            f' {e}, moving on...')

        LOG.info(f'FINISHED normal robust on {p}')

    except Exception as e:
        LOG.error(e)
Example #8
def _extract_result_row(path_file: pathlib.Path):
    lp = paths_serializer.load(path_file)
    row = lp.metadata.copy()
    agents_count = len(lp.agents)
    row['agents_count'] = agents_count
    return row
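
A small sketch, assuming a directory of dumped path files, that collects the extracted rows into a CSV the same way the stage functions above do; the directory and file names are placeholders:

import csv
import pathlib

# Hypothetical directory containing serialized path files.
results_dir = pathlib.Path('outputs/demo/robust_paths')
rows = [_extract_result_row(p) for p in results_dir.iterdir() if p.is_file()]

if rows:
    with (results_dir / 'summary.csv').open('w', newline='') as fresults:
        out_csv = csv.DictWriter(fresults, rows[0].keys())
        out_csv.writeheader()
        out_csv.writerows(rows)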
Example #9
def load(paths_file):
    lp = paths_serializer.load(paths_file)
    from IPython import embed
    embed()