# Standard-library imports used by the functions below; the project-specific
# helpers (add_volcano_mission, obtain_access_times, read_access_times,
# generate_*_simulation, and the KG/PRISM utilities) are assumed to be defined
# or imported elsewhere in this module.
import json
import multiprocessing as mp
import os
import random
import shutil
from pathlib import Path


def generate_volcano_simulations(quantity, event_fraction):
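    """Generate `quantity` synthetic volcano-eruption simulations under ./int_files.

    For each simulation, eruption parameters are sampled uniformly from the
    ranges below, a mission is registered in the knowledge graph, satellite
    access times are obtained (or reused from ./int_files/accesses), and the
    simulation files are written by generate_volcano_simulation().
    `event_fraction` is accepted but not used in this excerpt.
    """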
    int_path = Path("./int_files")
    simulations_path = int_path / "simulations"
    simulations_path.mkdir(parents=True, exist_ok=True)
    accesses_path = int_path / "accesses"

    eruption_length_range = [12., 120.]  # hours
    eruption_start_range = [0., 168.]  # hours since beginning of simulation
    location_range = ["Kilauea", "Etna", "Piton de la Fournaise", "Stromboli", "Merapi",
                "Erta Ale", "Ol Doinyo Lengai", "Mount Unzen", "Mount Yasur", "Ambrym"]
    speed_range = [0.1, 0.5]  # fraction of time until max eruption
    size_range = [200., 2000.]  # meter radius
    max_tir_temperature_range = [50., 80.]
    max_swir_temperature_range = [5., 15.]
    max_ash_cloud_range = [0.5, 0.9]
    max_terrain_displacement_range = [50., 150.]
    max_so2_levels_range = [1.0, 3.0]

    # For each simulation, sample a value for each parameter, create a simulation, save it under int_files
    for sim_number in range(quantity):
        # Create paths
        simulation_path = simulations_path / f"simulation_{sim_number}"
        if simulation_path.exists():
            shutil.rmtree(simulation_path)
        simulation_path.mkdir(parents=True, exist_ok=True)

        simulation_information_path = simulation_path / "simulation_information.json"
        data_streams_path = simulation_path / "data_streams"

        # Sample values
        eruption_length = random.uniform(*eruption_length_range)
        eruption_start = random.uniform(*eruption_start_range)
        location = random.choice(location_range)
        speed = random.uniform(*speed_range)
        size = random.uniform(*size_range)
        max_tir_temperature = random.uniform(*max_tir_temperature_range)
        max_swir_temperature = random.uniform(*max_swir_temperature_range)
        max_ash_cloud = random.uniform(*max_ash_cloud_range)
        max_terrain_displacement = random.uniform(*max_terrain_displacement_range)
        max_so2_levels = random.uniform(*max_so2_levels_range)

        # Create a mission in the KG
        mission_id = add_volcano_mission(location)

        # Generate accesses if not already there
        access_path = accesses_path / f"{location}.json"
        if not access_path.exists():
            access_times = obtain_access_times(mission_id)
        else:
            access_times = read_access_times(location)

        # Create simulation
        generate_volcano_simulation(mission_id, access_times, eruption_length, eruption_start, location, speed, size,
                            max_tir_temperature, max_swir_temperature, max_ash_cloud, max_terrain_displacement,
                            max_so2_levels, simulation_information_path, data_streams_path)


def generate_forest_fire_simulations(quantity, event_fraction):
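    """Generate `quantity` synthetic forest-fire simulations under ./int_files.

    Mirrors generate_volcano_simulations(); the `eruption_length` and
    `eruption_start` names are carried over from the volcano generator and here
    describe the fire event. `event_fraction` is accepted but not used in this
    excerpt.
    """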
    int_path = Path("./int_files")
    simulations_path = int_path / "simulations"
    simulations_path.mkdir(parents=True, exist_ok=True)
    accesses_path = int_path / "accesses"

    eruption_length_range = [12., 120.]  # hours
    eruption_start_range = [0., 168.]  # hours since beginning of simulation
    location_range = ["Spain", "Greece", "California", "Washington", "Kenya"]
    speed_range = [0.1, 0.5]  # fraction of time until maximum intensity
    size_range = [200., 2000.]  # meter radius
    max_temp_range = [80., 99.]
    max_fire_temp_range = [200., 300.]
    max_cloud_range = [0.5, 0.9]
    max_gases_range = [100, 200]

    # For each simulation, sample a value for each parameter, create a simulation, save it under int_files
    for sim_number in range(quantity):
        # Create paths
        simulation_path = simulations_path / f"simulation_{sim_number}"
        if simulation_path.exists():
            shutil.rmtree(simulation_path)
        simulation_path.mkdir(parents=True, exist_ok=True)

        simulation_information_path = simulation_path / "simulation_information.json"
        data_streams_path = simulation_path / "data_streams"

        # Sample values
        eruption_length = random.uniform(*eruption_length_range)
        eruption_start = random.uniform(*eruption_start_range)
        location = random.choice(location_range)
        speed = random.uniform(*speed_range)
        size = random.uniform(*size_range)
        max_temp = random.uniform(*max_temp_range)
        max_fire_temp = random.uniform(*max_fire_temp_range)
        max_cloud = random.uniform(*max_cloud_range)
        max_gases = random.uniform(*max_gases_range)

        # Create a mission in the KG
        mission_id = add_volcano_mission(location)

        # Generate accesses if not already there
        access_path = accesses_path / f"{location}.json"
        if not access_path.exists():
            access_times = obtain_access_times(mission_id)
        else:
            access_times = read_access_times(location)

        # Create simulation
        generate_forest_fire_simulation(mission_id, eruption_length, eruption_start, location, speed, size,
                                        max_temp, max_fire_temp, max_cloud, max_gases, simulation_information_path)


def generate_flood_simulations(quantity, event_fraction):
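    """Generate `quantity` synthetic flood simulations under ./int_files.

    Mirrors generate_volcano_simulations(); the `eruption_*` parameter names are
    carried over from the volcano generator and here describe the flood event.
    `event_fraction` is accepted but not used in this excerpt.
    """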
    int_path = Path("./int_files")
    simulations_path = int_path / "simulations"
    simulations_path.mkdir(parents=True, exist_ok=True)
    accesses_path = int_path / "accesses"

    eruption_length_range = [12., 120.]  # hours
    eruption_start_range = [0., 168.]  # hours since beginning of simulation
    location_range = ["India", "Bangladesh", "Texas", "Italy", "Brazil"]
    speed_range = [0.1, 0.5]  # fraction of time until maximum intensity
    size_range = [200., 2000.]  # meter radius
    max_soil_moisture_range = [80., 99.]
    max_precipitation_range = [200., 300.]
    max_land_range = [0.5, 0.9]

    # For each simulation, sample a value for each parameter, create a simulation, save it under int_files
    for sim_number in range(quantity):
        # Create paths
        simulation_path = simulations_path / f"simulation_{sim_number}"
        if simulation_path.exists():
            shutil.rmtree(simulation_path)
        simulation_path.mkdir(parents=True, exist_ok=True)

        simulation_information_path = simulation_path / "simulation_information.json"
        data_streams_path = simulation_path / "data_streams"

        # Sample values
        eruption_length = random.uniform(*eruption_length_range)
        eruption_start = random.uniform(*eruption_start_range)
        location = random.choice(location_range)
        speed = random.uniform(*speed_range)
        size = random.uniform(*size_range)
        max_soil_moisture = random.uniform(*max_soil_moisture_range)
        max_precipitation = random.uniform(*max_precipitation_range)
        max_land = random.uniform(*max_land_range)

        # Create a mission in the KG
        mission_id = add_volcano_mission(location)

        # Generate accesses if not already there
        access_path = accesses_path / f"{location}.json"
        if not access_path.exists():
            access_times = obtain_access_times(mission_id)
        else:
            access_times = read_access_times(location)

        # Create simulation
        generate_flood_simulation(mission_id, eruption_length, eruption_start, location, speed, size,
                            max_soil_moisture, max_precipitation, max_land, simulation_information_path)


def compute_probabilities():
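    """For each simulation under ./int_files/simulations, run the full pipeline
    (UniKER knowledge-graph reasoning, sensor planning, verification), collect
    the optimal teams, and write the results to results.json. The
    "Benchmark Team" entry (Method 2) is a placeholder in this excerpt.
    """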
    paths = Path('./int_files/simulations/')
    simulation_probabilities = {"Full Pipeline": [], "Benchmark Team": []}
    for simulation_path in [p for p in paths.iterdir() if p.is_dir()]:
        simulation_info_path = simulation_path / 'simulation_information.json'
        with simulation_info_path.open() as simulation_info_file:
            simulation_info = json.load(simulation_info_file)

        # Method 1
        # Full process (UniKER - Sensing - Verification)
        location = simulation_info["location"]
        mission_id = simulation_info["mission_id"]
        access_intervals = read_access_times(location)
        print_kg_reasoning_files(mission_id, access_intervals, simulation_path)
        final_path = train_uniker(simulation_path)
        satellite_list = merge_results(final_path)
        shutil.rmtree(simulation_path / "record", ignore_errors=True)

        driver = get_neo4j_driver()
        with driver.session() as session:
            team = get_sensors_from_satellite_list(session, satellite_list)
        team = run_sensor_planner(team, simulation_info)
        team_probs_info_path = simulation_path / 'team_probs.json'
        with team_probs_info_path.open('w') as team_probs_info_file:
            json.dump(team, team_probs_info_file)

        optimal_teams = run_verification(team, simulation_path,
                                         simulation_info, access_intervals)

        simulation_probabilities["Full Pipeline"].append(optimal_teams)
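        # Checkpoint intermediate results after every simulation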
        simulation_results = paths / 'results.json'
        with simulation_results.open('w') as simulation_res_file:
            simulation_res_file.write(str(simulation_probabilities))

        # Method 2

        # ...
    print(simulation_probabilities)
    simulation_results = paths / 'results.json'
    with simulation_results.open('w') as simulation_res_file:
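        # NOTE: str() writes a Python-repr dump, not strict JSON (see the
        # commented-out json.dump below)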
        simulation_res_file.write(str(simulation_probabilities))
        #json.dump(simulation_probabilities, simulation_res_file)
    return simulation_probabilities


def main():
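    """Run the pipeline on simulation_0 with a fixed satellite list: build the
    agent/sensor/measurement relations from the knowledge graph, prepare the
    PRISM MDP inputs and evaluate them (one worker process per mission
    timestep), and print the optimal teaming returned by pareto_plot_all().
    """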
    simulation_path = Path('./int_files/simulations/simulation_0').resolve()
    simulation_info_path = simulation_path / 'simulation_information.json'
    with simulation_info_path.open() as simulation_info_file:
        simulation_info = json.load(simulation_info_file)
    # Method 1
    # Full process (UniKER - Sensing - Verification)
    location = simulation_info["location"]
    mission_id = simulation_info["mission_id"]
    access_intervals = read_access_times(location)
    # ["Sentinel-1 A", "Sentinel-1 B", "GOES-13", "GOES-14", "GOES-15", "GOES-16", "GOES-17", "Aqua", "Terra"]
    satellite_list = [
        "Sentinel-1 A", "Sentinel-1 B", "GOES-15", "GOES-17", "Aqua", "Terra"
    ]

    driver = get_neo4j_driver()
    with driver.session() as session:
        team = get_sensors_from_satellite_list(session, satellite_list)
    team = run_sensor_planner(team, simulation_info)
    team = construct_team_from_list(team)
    team_time = find_time_bounds(team, location, access_intervals)

    print(amy_team(team_time, "probabilities"))
    print(amy_team(team_time, "times"))

    entity_dict, inv_entity_dict = retrieve_entity_dict(driver)

    prefix_list = ['a', 's', 'm']
    a_prefix, s_prefix, m_prefix = prefix_list
    team_time_id = generate_team_time_id(entity_dict, team_time, a_prefix,
                                         s_prefix)

    a_list, s_list = generate_as_lists(team, entity_dict, a_prefix, s_prefix)
    m_list = generate_m_list(team,
                             simulation_path / "simulation_information.json",
                             entity_dict, prefix_list[2])
    num_asm = [len(a_list), len(s_list), len(m_list)]
    num_a, num_s, num_m = num_asm
    print('# of agents, sensors, meas: ', num_asm)

    check_time(team, team_time_id, m_list, entity_dict, s_prefix, m_prefix)

    # relationship matrices
    relation_as = construct_as_matrix(team, entity_dict, num_a, num_s,
                                      a_prefix, s_prefix, a_list, s_list)

    # modules for PRISM MDP
    all_states = all_states_as(num_a, num_s, relation_as, a_list, s_list,
                               team_time_id)
    num_states = len(all_states)  # total number of states

    # The PRISM_WSL env var ("yes"/"no") presumably controls whether PRISM is
    # invoked through WSL.
    prism_wsl = (os.environ.get("PRISM_WSL", "yes") == "yes")

    # name of files for PRISM (saved to current directory)
    mission_file = simulation_path / "prop1.txt"  # specification
    mdp_filename = "KG_MDP1.txt"  # MDP
    output_filename = "output1.txt"  # output log
    prism_path = Path(
        os.environ.get("PRISM_PATH", 'D:/Dropbox/darpa_grant/prism/prism/bin'))
    print(prism_path)
    mission_length = 14

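    # Evaluate each mission timestep in its own process; per-timestep results,
    # teamings, and timing info are collected through the multiprocessing queue.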
    qout = mp.Queue()
    processes = [
        mp.Process(target=parallelize,
                   args=(team, team_time, entity_dict, inv_entity_dict,
                         mission_file, mdp_filename, output_filename,
                         simulation_path, prism_path, m_list, prefix_list, i,
                         qout, prism_wsl)) for i in range(mission_length)
    ]
    for p in processes:
        p.start()

    for p in processes:
        p.join()

    result = []
    teaming = []
    times = []
    for p in processes:
        result_p, teaming_p, time_dict = qout.get()
        result.append(result_p)
        teaming.append(teaming_p)
        times.append(time_dict)

    # merge all teaming dictionaries into one
    teams = {k: v for d in teaming for k, v in d.items()}
    timestep_dict = {k: v for d in times for k, v in d.items()}

    optimal_teaming = pareto_plot_all(result, teams, timestep_dict)
    print('\n ===================== OPTIMAL TEAM ===================== ')
    print(optimal_teaming)
    print(result)
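

# Entry-point guard (not in the original excerpt): the multiprocessing code in
# main() should be launched behind the standard __main__ check when this file
# is run as a script.
if __name__ == "__main__":
    main()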