Example #1
def add_hurricane_locations():
    driver = get_neo4j_driver()

    with driver.session() as session:

        def create_hurricane(tx, name, lat, lon):
            tx.run(
                "CREATE (l1:Location {name: $name, latitude: $lat, longitude: $lon})",
                name=name,
                lat=lat,
                lon=lon)

        # Add locations where measurements need to be made
        session.write_transaction(create_hurricane, "Atlantic1", 18.607736,
                                  -61.288092)
        session.write_transaction(create_hurricane, "Atlantic2", 26.308562,
                                  -75.447379)
        session.write_transaction(create_hurricane, "Atlantic3", 25.957117,
                                  -78.542334)
        session.write_transaction(create_hurricane, "Atlantic4", 27.790243,
                                  -94.164242)
        session.write_transaction(create_hurricane, "Atlantic5", 11.426051,
                                  -81.619162)
        session.write_transaction(create_hurricane, "Pacific1", 33.079810,
                                  143.703265)
        session.write_transaction(create_hurricane, "Pacific2", 22.183588,
                                  136.097657)
        session.write_transaction(create_hurricane, "Pacific3", 14.090973,
                                  130.073797)
        session.write_transaction(create_hurricane, "Pacific4", 6.680097,
                                  130.780933)
        session.write_transaction(create_hurricane, "Pacific5", 0.427483,
                                  138.535974)
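
Examples #1, #3, #8, and #9 all repeat the same per-location transaction function with different data. A minimal consolidation sketch (a hypothetical add_locations helper, reusing get_neo4j_driver from these examples) could factor the pattern out:

def add_locations(locations):
    # locations: iterable of (name, latitude, longitude) tuples
    driver = get_neo4j_driver()

    with driver.session() as session:

        def create_location(tx, name, lat, lon):
            tx.run(
                "CREATE (l1:Location {name: $name, latitude: $lat, longitude: $lon})",
                name=name,
                lat=lat,
                lon=lon)

        for name, lat, lon in locations:
            session.write_transaction(create_location, name, lat, lon)

Each loader then reduces to a single call, e.g. add_locations([("Kilauea", 19.4119543, -155.2747327)]).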
Example #2
def obtain_access_times(mission_id):
    # Connect to database, open session
    driver = get_neo4j_driver()

    with driver.session() as session:
        # 1. Obtain a list of satellites that can participate in the mission from the Knowledge Graph
        satellites = retrieve_available_satellites(mission_id, session)
        # 2. Download the TLEs for the satellites
        satellites = add_tle_information(satellites)
        # 3. Get mission information
        mission = get_mission_information(mission_id, session)
        # 4. Save all required information on a file for Orekit:
        print(mission)
        print_orekit_info(satellites, mission)

    driver.close()
    # 5. Call Orekit and wait for the results before continuing
    jar_path = Path(
        os.environ.get("PROPAGATOR_JAR", "./jar_files/propagator.jar"))
    orekit_process = subprocess.run(
        ["java", "-jar", str(jar_path)], cwd=os.getcwd())

    # 6. Read Orekit results from file and put them into the right format for this code
    java_accesses_path = Path("./int_files/accesses.json")
    accesses_folder = Path("./int_files/accesses")
    accesses_path = accesses_folder / f'{mission["locations"][0]["name"]}.json'
    accesses_folder.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(java_accesses_path, accesses_path)
    with accesses_path.open("r") as accesses_file:
        accesses = json.load(accesses_file)

    # Return a map<Satellite, map<Instrument, map<Location, Intervals>>>
    return accesses
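
Elsewhere in this listing the returned structure is read through an "output" key (see Examples #10 and #12); a minimal sketch of walking it, assuming that shape:

def iterate_accesses(accesses):
    # Walk satellite -> instrument -> location -> access intervals,
    # skipping locations with no visibility windows
    for sat_name, instruments in accesses["output"].items():
        for instr_name, locations in instruments.items():
            for loc_name, intervals in locations.items():
                if intervals["timeArray"]:
                    yield sat_name, instr_name, loc_name, intervals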
Example #3
def add_volcano_locations():
    driver = get_neo4j_driver()

    with driver.session() as session:

        def create_volcano(tx, name, lat, lon):
            tx.run(
                "CREATE (l1:Location {name: $name, latitude: $lat, longitude: $lon})",
                name=name,
                lat=lat,
                lon=lon)

        # Add locations where measurements need to be made
        session.write_transaction(create_volcano, "Kilauea", 19.4119543,
                                  -155.2747327)
        session.write_transaction(create_volcano, "Etna", 37.7510042,
                                  14.9846801)
        session.write_transaction(create_volcano, "Piton de la Fournaise",
                                  -21.2494387, 55.7112432)
        session.write_transaction(create_volcano, "Stromboli", 38.7918408,
                                  15.1977824)
        session.write_transaction(create_volcano, "Merapi", -7.5407171,
                                  110.4282145)
        session.write_transaction(create_volcano, "Erta Ale", 13.6069145,
                                  40.6529394)
        session.write_transaction(create_volcano, "Ol Doinyo Lengai",
                                  -2.7635781, 35.9056765)
        session.write_transaction(create_volcano, "Mount Unzen", 32.7804497,
                                  130.2497246)
        session.write_transaction(create_volcano, "Mount Yasur", -19.527192,
                                  169.4307231)
        session.write_transaction(create_volcano, "Ambrym", -16.2388854,
                                  168.042517)
Example #4
def clear_kg():
    driver = get_neo4j_driver()

    with driver.session() as session:
        # Match nodes by label in a single pattern: a comma-separated MATCH
        # is a cartesian product and deletes nothing if any label is empty
        summary = session.run(
            'MATCH (n) WHERE n:Mission OR n:Observation OR n:Location '
            'DETACH DELETE n').consume()
        print(summary.counters)
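
For large graphs, deleting everything in one transaction can exhaust server memory. Neo4j 4.4+ supports batched deletes; a sketch, assuming a recent server (CALL ... IN TRANSACTIONS must run in an auto-commit transaction, which session.run provides):

def clear_kg_batched():
    driver = get_neo4j_driver()

    with driver.session() as session:
        # Delete in batches of 1000 nodes to bound transaction memory
        session.run(
            'MATCH (n) WHERE n:Mission OR n:Observation OR n:Location '
            'CALL { WITH n DETACH DELETE n } IN TRANSACTIONS OF 1000 ROWS'
        ).consume()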
Example #5
def create_logic():
    driver = get_neo4j_driver()

    evidence = []
    predicates = []
    name_matching = {}
    predicate_types = set()

    with driver.session() as session:
        # Generate evidence and predicates for sensors and platforms
        results = session.run('MATCH (s:Sensor)-[r:HOSTS]-(p:Platform) '
                              'WHERE p.status = "Currently being flown" '
                              'RETURN r')
        save_evidence(session, results, evidence, predicates, predicate_types)

        # Generate evidence and predicates for sensors and observable properties
        results = session.run(
            'MATCH (p:Platform)-[:HOSTS]-(s:Sensor)-[r:OBSERVES]-(op:ObservableProperty) '
            'WHERE p.status = "Currently being flown" '
            'RETURN DISTINCT r')
        save_evidence(session, results, evidence, predicates, predicate_types)

        # Generate evidence and predicates for mission and observations
        results = session.run(
            'MATCH (m:Mission)-[r:REQUIRES]-(ob:Observation) '
            'WHERE m.mid = 1 '
            'RETURN r')
        save_evidence(session, results, evidence, predicates, predicate_types)

        # Generate evidence and predicates for observations and its observable properties
        results = session.run(
            'MATCH (m:Mission)-[:REQUIRES]-(ob:Observation)-[r:OBSERVEDPROPERTY]-(op:ObservableProperty) '
            'WHERE m.mid = 1 '
            'RETURN DISTINCT r')
        save_evidence(session, results, evidence, predicates, predicate_types)

        # Generate evidence and predicates for platforms being in visibility of the target
        visibility_threshold = 0.8
        predicate_types.add('inVisibilityOfTarget')
        predicates.append({
            'type': 'inVisibilityOfTarget',
            'node_types': ['Platform']
        })
        results = session.run('MATCH (p:Platform) '
                              'WHERE p.status = "Currently being flown" '
                              'RETURN p')
        for record in results:
            platform_node = record.value()
            randnum = random()
            if randnum < visibility_threshold:
                evidence.append({
                    'type': 'inVisibilityOfTarget',
                    'elements': [platform_node]
                })

        return evidence, predicates
Example #6
def add_flood_mission(location):
    driver = get_neo4j_driver()

    with driver.session() as session:
        # Count number of missions to get ID
        result = session.run('MATCH (m:Mission) RETURN count(m) as count')
        mission_count = result.single()[0]

        # Create a sample mission
        mission_id = mission_count + 1
        summary = session.run(
            'CREATE (m:Mission {mid: $mission_id, name: $name, description: $description})',
            mission_id=mission_id,
            name=f"Mission {mission_id} - Active Flood Monitoring",
            description='').consume()
        print(summary.counters)

        # Add the observations that need to be measured
        now_time = datetime.now()
        month_time = now_time + timedelta(days=14)
        summary = session.run(
            'MATCH (op1:ObservableProperty), (op2:ObservableProperty), (op3:ObservableProperty), '
            '(m:Mission) '
            'WHERE op1.name = "Soil moisture at the surface" AND op2.name = "Precipitation Profile (liquid or solid)" '
            'AND op3.name = "Land surface imagery" '
            'AND m.mid = $mission_id '
            'CREATE (o1:Observation {name: $name1, startDate: $start_date, endDate: $end_date, accuracy: $acc1}), '
            '(o2:Observation {name: $name2, startDate: $start_date, endDate: $end_date, accuracy: $acc2}), '
            '(o3:Observation {name: $name3, startDate: $start_date, endDate: $end_date, accuracy: $acc3}), '
            '(m)-[:REQUIRES]->(o1), (m)-[:REQUIRES]->(o2), (m)-[:REQUIRES]->(o3), '
            '(o1)-[:OBSERVEDPROPERTY]->(op1), (o2)-[:OBSERVEDPROPERTY]->(op2), '
            '(o3)-[:OBSERVEDPROPERTY]->(op3)',
            mission_id=mission_id,
            start_date=now_time,
            end_date=month_time,
            name1='M1 - Soil moisture',
            acc1='1 K',
            name2='M1 - Precipitation intensity',
            acc2='1 K',
            name3='M1 - Land cover',
            acc3='10 % confidence').consume()
        print(summary.counters)

        summary = session.run(
            'MATCH (m:Mission), (l:Location) '
            'WHERE m.mid = $mission_id AND l.name = $location '
            'CREATE (m)-[:HASLOCATION]->(l)',
            mission_id=mission_id,
            location=location).consume()

        print(summary.counters)

        return mission_id
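
A sketch of end-to-end usage, assuming an empty knowledge graph and the other helpers in this listing:

# Hypothetical driver code tying the helpers together
add_flood_locations()                               # Example #8
mission_id = add_flood_mission("Bangladesh")        # this example
access_intervals = obtain_access_times(mission_id)  # Example #2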
Example #7
def compute_probabilities():
    paths = Path('./int_files/simulations/')
    simulation_probabilities = {"Full Pipeline": [], "Benchmark Team": []}
    for simulation_path in [p for p in paths.iterdir() if p.is_dir()]:
        simulation_info_path = simulation_path / 'simulation_information.json'
        with simulation_info_path.open() as simulation_info_file:
            simulation_info = json.load(simulation_info_file)

        # Method 1
        # Full process (UniKER - Sensing - Verification)
        location = simulation_info["location"]
        mission_id = simulation_info["mission_id"]
        access_intervals = read_access_times(location)
        print_kg_reasoning_files(mission_id, access_intervals, simulation_path)
        final_path = train_uniker(simulation_path)
        satellite_list = merge_results(final_path)
        shutil.rmtree(simulation_path / "record", ignore_errors=True)

        driver = get_neo4j_driver()
        with driver.session() as session:
            team = get_sensors_from_satellite_list(session, satellite_list)
        team = run_sensor_planner(team, simulation_info)
        team_probs_info_path = simulation_path / 'team_probs.json'
        with team_probs_info_path.open('w') as team_probs_info_file:
            json.dump(team, team_probs_info_file)

        optimal_teams = run_verification(team, simulation_path,
                                         simulation_info, access_intervals)

        simulation_probabilities["Full Pipeline"].append(optimal_teams)
        simulation_results = paths / 'results.json'
        with simulation_results.open('w') as simulation_res_file:
            simulation_res_file.write(str(simulation_probabilities))

        # Method 2

        # ...
    print(simulation_probabilities)
    simulation_results = paths / 'results.json'
    with simulation_results.open('w') as simulation_res_file:
        simulation_res_file.write(str(simulation_probabilities))
        #json.dump(simulation_probabilities, simulation_res_file)
    return simulation_probabilities
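
The commented-out json.dump most likely fails because the teaming objects are not directly JSON-serializable, hence the str() fallback above. A hedged alternative is json.dump's default hook, which stringifies anything the encoder cannot handle:

with simulation_results.open('w') as simulation_res_file:
    # default=str converts non-serializable values (e.g. team objects)
    # to strings instead of raising TypeError
    json.dump(simulation_probabilities, simulation_res_file, default=str)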
Example #8
def add_flood_locations():
    driver = get_neo4j_driver()

    with driver.session() as session:

        def create_flood(tx, name, lat, lon):
            tx.run(
                "CREATE (l1:Location {name: $name, latitude: $lat, longitude: $lon})",
                name=name,
                lat=lat,
                lon=lon)

        # Add locations where measurements need to be made
        session.write_transaction(create_flood, "India", 28.236019, 77.729279)
        session.write_transaction(create_flood, "Bangladesh", 23.078129,
                                  90.624984)
        session.write_transaction(create_flood, "Texas", 29.695276, -95.351883)
        session.write_transaction(create_flood, "Italy", 45.421771, 12.141446)
        session.write_transaction(create_flood, "Brazil", -3.248639,
                                  -60.007637)
Example #9
def add_forest_fire_locations():
    driver = get_neo4j_driver()

    with driver.session() as session:

        def create_forest_fire(tx, name, lat, lon):
            tx.run(
                "CREATE (l1:Location {name: $name, latitude: $lat, longitude: $lon})",
                name=name,
                lat=lat,
                lon=lon)

        # Add locations where measurements need to be made
        session.write_transaction(create_forest_fire, "Spain", 41.673299,
                                  1.640078)
        session.write_transaction(create_forest_fire, "Greece", 39.774301,
                                  22.058433)
        session.write_transaction(create_forest_fire, "California", 37.735543,
                                  -120.793321)
        session.write_transaction(create_forest_fire, "Washington", 46.094343,
                                  -121.660813)
        session.write_transaction(create_forest_fire, "Kenya", -0.547029,
                                  40.058512)
Example #10
def generate_volcano_simulation(mission_id, access_intervals, eruption_length,
                                eruption_start, location, speed, size,
                                max_tir_temperature, max_swir_temperature,
                                max_ash_cloud, max_terrain_displacement,
                                max_so2_levels, simulation_information_path,
                                data_streams_path):
    # For all data, we assume second-by-second collection for a week (24*7*3600 = 604800 data points)
    # We only save the data points for the times when the target is visible for each instrument
    # Furthermore, the volcano eruption starts happening at 3AM on the third day (datapoint 180000)
    # The peak of the eruption is at 8AM (dp 198000)
    # Eruption stops around noon on the fifth day (dp 385200)
    # Data back to nominal after 12 hours (dp 428400)

    fake_data_generators = {location: {}}
    eruption_start_dp = eruption_start*3600
    eruption_max_dp = eruption_start_dp + speed*eruption_length*3600
    eruption_slope_dp = eruption_start_dp + 0.9*eruption_length*3600
    eruption_end_dp = eruption_start_dp + eruption_length*3600
    # Generate TIR data (radiance @ 11 um)
    state_changes = [eruption_start_dp, eruption_max_dp, eruption_slope_dp, eruption_end_dp]
    states = [{"type": "stable", "mean": 10.},
              {"type": "slope", "start": 10., "end": max_tir_temperature},
              {"type": "stable", "mean": max_tir_temperature},
              {"type": "slope", "start": max_tir_temperature, "end": 10.},
              {"type": "stable", "mean": 10.}]
    fake_data_generators[location]["Land surface temperature"] = {
        "changes": state_changes,
        "states": states
    }

    # Generate SWIR data (radiance @ 4 um)
    state_changes = [eruption_start_dp, eruption_max_dp, eruption_slope_dp, eruption_end_dp]
    states = [{"type": "stable", "mean": 0.},
              {"type": "slope", "start": 0., "end": max_swir_temperature},
              {"type": "stable", "mean": max_swir_temperature},
              {"type": "slope", "start": max_swir_temperature, "end": 0.},
              {"type": "stable", "mean": 0.}]
    fake_data_generators[location]["Fire temperature"] = {
        "changes": state_changes,
        "states": states
    }

    # Generate Plume data (prob of ash plume)
    state_changes = [eruption_start_dp, eruption_max_dp, eruption_slope_dp, eruption_end_dp]
    states = [{"type": "stable", "mean": 0.},
              {"type": "slope", "start": 0., "end": max_ash_cloud},
              {"type": "stable", "mean": max_ash_cloud},
              {"type": "slope", "start": max_ash_cloud, "end": 0.},
              {"type": "stable", "mean": 0}]
    fake_data_generators[location]["Cloud type"] = {
        "changes": state_changes,
        "states": states
    }

    # Generate SAR data (mean displacement of terrain in mm)
    state_changes = [eruption_start_dp, eruption_max_dp, eruption_slope_dp, eruption_end_dp]
    states = [{"type": "slope", "start": 0., "end": 20.},
              {"type": "slope", "start": 20., "end": max_terrain_displacement},
              {"type": "slope", "start": max_terrain_displacement, "end": 30.},
              {"type": "slope", "start": 30., "end": 0.},
              {"type": "stable", "mean": 0.}]
    fake_data_generators[location]["Land surface topography"] = {
        "changes": state_changes,
        "states": states
    }

    # Generate SO2 data (Dobson units)
    state_changes = [eruption_start_dp, eruption_max_dp, eruption_slope_dp, eruption_end_dp]
    states = [{"type": "stable", "mean": 0.5},
              {"type": "slope", "start": 0.5, "end": max_so2_levels},
              {"type": "stable", "mean": max_so2_levels},
              {"type": "slope", "start": max_so2_levels, "end": 0.5},
              {"type": "stable", "mean": 0.5}]
    fake_data_generators[location]["Atmospheric Chemistry - SO2 (column/profile)"] = {
        "changes": state_changes,
        "states": states
    }

    # Connect to database, open session
    driver = get_neo4j_driver()

    with driver.session() as session:
        satellites = retrieve_available_satellites(mission_id, session)
        data_locations_json = {}
        for satellite in satellites:
            if satellite["name"] in access_intervals["output"]:
                data_locations_json[satellite["name"]] = {}
                for instrument in satellite["sensors"]:
                    data_locations_json[satellite["name"]][instrument["name"]] = {}
                    for observation in instrument["characteristics"]:
                        data_locations_json[satellite["name"]][instrument["name"]][observation] = {}
                        for location in access_intervals["output"][satellite["name"]][instrument["name"]]:
                            if access_intervals["output"][satellite["name"]][instrument["name"]][location]["timeArray"]:
                                observation_name = slugify(satellite["name"] + "__" + instrument["name"] + "__" + location + "__" + observation)
                                data_locations_json[satellite["name"]][instrument["name"]][observation][location] = observation_name + ".npy"
                                array_path = data_streams_path / f"{observation_name}.npy"
                                #time_array, data_array = generate_fake_timeline(instrument["characteristics"][observation]["Q"],
                                #                                                access_intervals["output"][satellite["name"]][instrument["name"]][location],
                                #                                                fake_data_generators[location][observation])
                                # with open(array_path, 'wb') as f:
                                #     np.save(f, time_array)
                                #     np.save(f, data_array)
                                print(observation_name)

    observable_properties = ["Land surface temperature", "Fire temperature", "Cloud type" , "Land surface topography"]
    with simulation_information_path.open('w', encoding='utf8') as simulation_information_file:
        simulation_information_json = {
            "mission_id": mission_id,
            "length": eruption_length,
            "start": eruption_start,
            "location": location,
            "speed": speed,
            "size": size,
            "max_tir_temperature": max_tir_temperature,
            "max_swir_temperature": max_swir_temperature,
            "max_ash_cloud": max_ash_cloud,
            "max_terrain_displacement": max_terrain_displacement,
            "max_so2_levels": max_so2_levels,
            "data_locations": data_locations_json,
            "observable_properties": observable_properties
        }
        json.dump(simulation_information_json, simulation_information_file)
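
generate_fake_timeline (commented out above) is not shown in this listing; a minimal sketch of how the changes/states piecewise description could be evaluated at a single datapoint, assuming the five-state layout used above (hypothetical helper):

import numpy as np

def evaluate_piecewise(dp, changes, states):
    # changes holds the boundaries between the len(changes) + 1 states
    idx = int(np.searchsorted(changes, dp, side='right'))
    state = states[idx]
    if state["type"] == "stable":
        return state["mean"]
    # Linear ramp across the segment for "slope" states
    seg_start = changes[idx - 1] if idx > 0 else 0
    seg_end = changes[idx] if idx < len(changes) else seg_start + 1
    frac = (dp - seg_start) / max(seg_end - seg_start, 1)
    return state["start"] + frac * (state["end"] - state["start"])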
Example #11
def run_verification(original_team, simulation_path: Path, simulation_info, access_intervals):
    # data from knowledge graph
    driver = get_neo4j_driver()

    # Save kg with names first, at the end substitute for indices
    with driver.session() as session:
        mission_info = get_mission_information(simulation_info["mission_id"], session)
    path_to_dict = Path('./int_files/output.dict')   
    prism_path = Path(os.environ.get("PRISM_PATH", 'D:/Dropbox/darpa_grant/prism/prism/bin'))
    print(prism_path)
    prism_wsl = (os.environ.get("PRISM_WSL", "yes") == "yes")

    # name of files for PRISM (saved to current directory)
    mission_file = simulation_path / "prop1.txt"             # specification
    mdp_filename = "KG_MDP1.txt"                   # MDP
    output_filename = "output1.txt"            # output log

    # Make paths absolute
    mission_file = mission_file.resolve()
    simulation_path = simulation_path.resolve()

    # Iterate teams until we have a manageable number of states (~1000)
    entity_dict, inv_entity_dict = retrieve_entity_dict(driver)
    num_states = inf
    base_team = copy.deepcopy(original_team)
    num_agents = 15
    while num_states > 1000:
        mission_length = find_mission_length(mission_info)

        base_team = random_team_choice(base_team, num_agents)
        team = construct_team_from_list(base_team)
        target = simulation_info["location"]
        team_time = find_time_bounds(team, target, access_intervals)
        
        prefix_list = ['a', 's', 'm']
        a_prefix, s_prefix, m_prefix = prefix_list
        team_time_id = generate_team_time_id(entity_dict, team_time, a_prefix, s_prefix)
        
        a_list, s_list = generate_as_lists(team, entity_dict, a_prefix, s_prefix)
        m_list = generate_m_list(team, simulation_path / "simulation_information.json", entity_dict, prefix_list[2])
        num_asm = [len(a_list), len(s_list), len(m_list)]
        num_a, num_s, num_m = num_asm
        print('# of agents, sensors, meas: ', num_asm)
        if num_s > 16:
            num_agents -= 1
            continue

        check_time(team, team_time_id, m_list, entity_dict, s_prefix, m_prefix)

        # relationship matrices
        relation_as = construct_as_matrix(team, entity_dict, num_a, num_s, a_prefix, s_prefix, a_list, s_list)

        # modules for PRISM MDP
        all_states = all_states_as(num_a, num_s, relation_as, a_list, s_list, team_time_id)
        num_states = len(all_states)    # total number of states
        print(f"Num agents: {num_agents}; Num states: {num_states}")
        num_agents -= 1
    
    #print(amy_team(team, "probabilities"))
    #print(amy_team(team, "times"))
    prefix_list = ['a', 's', 'm']
    m_list = generate_m_list(team, simulation_path / "simulation_information.json", entity_dict, prefix_list[2])

    qout = mp.Queue()
    processes = [mp.Process(target=parallelize, args=(team, team_time, entity_dict, inv_entity_dict, mission_file, mdp_filename, output_filename, simulation_path, prism_path, m_list, prefix_list, i, qout, prism_wsl)) for i in range(mission_length)]
    for p in processes:
        p.start()

    # Drain the queue before joining: a worker that still has buffered
    # results on the queue will not exit, so joining first can deadlock
    result = []
    teaming = []
    times = []
    for _ in processes:
        result_p, teaming_p, time_dict = qout.get()
        result.append(result_p)
        teaming.append(teaming_p)
        times.append(time_dict)

    for p in processes:
        p.join()

    # merge all teaming dictionaries into one
    teams = {k: v for d in teaming for k, v in d.items()}
    timestep_dict = {k: v for d in times for k, v in d.items()}

    optimal_teaming = pareto_plot_all(result, teams, timestep_dict)
    print('\n ===================== OPTIMAL TEAM ===================== ')
    #print(result, teams)
    print(optimal_teaming)

    return optimal_teaming
Example #12
def print_kg_reasoning_files(mission_id, access_intervals,
                             simulation_path: Path):
    # Generate the Knowledge Base relationship by relationship, saving the entities in a set to later generate the
    # dictionary

    # Connect to database, open session
    driver = get_neo4j_driver()

    entities = set()
    relations = set()
    kg = []

    # Save kg with names first, at the end substitute for indices
    with driver.session() as session:
        # HOSTS
        # isInstanceOf
        satellites_info = get_all_active_satellites_with_instruments(session)
        relations.add("HOSTS")
        relations.add("isInstanceOf")
        for satellite in satellites_info:
            sat_name = satellite["name"]
            entities.add(sat_name)
            for sensor in satellite["sensors"]:
                sensor_name = sensor["name"]
                sensor_instance_name = f"{sat_name}|{sensor_name}"
                entities.add(sensor_instance_name)
                kg.append({
                    "head": sat_name,
                    "relationship": "HOSTS",
                    "tail": sensor_instance_name
                })
                entities.add(sensor_name)
                kg.append({
                    "head": sensor_instance_name,
                    "relationship": "isInstanceOf",
                    "tail": sensor_name
                })

        # OBSERVES
        relations.add("OBSERVES")
        measures_relations = get_observes_relationships(session)
        for relation in measures_relations:
            entities.add(relation["head"])
            entities.add(relation["tail"])
        kg.extend(measures_relations)

        # OBSERVEDPROPERTY
        relations.add("OBSERVEDPROPERTY")
        observedproperty_relations = get_observedproperty_relations(session)
        for relation in observedproperty_relations:
            entities.add(relation["head"])
            entities.add(relation["tail"])
        kg.extend(observedproperty_relations)

        # REQUIRES
        relations.add("REQUIRES")
        requires_relations = get_requires_relations(mission_id, session)
        for relation in requires_relations:
            entities.add(relation["head"])
            entities.add(relation["tail"])
        kg.extend(requires_relations)

        # HASLOCATION
        relations.add("HASLOCATION")
        haslocation_relations = get_haslocation_relations(mission_id, session)
        for relation in haslocation_relations:
            entities.add(relation["head"])
            entities.add(relation["tail"])
        kg.extend(haslocation_relations)

        # inVisibilityOfTarget
        relations.add("inVisibilityOfTarget")
        for sat_name, sat_info in access_intervals["output"].items():
            for instr_name, instr_info in sat_info.items():
                for target_name, accesses in instr_info.items():
                    if len(accesses["timeArray"]) > 0:
                        sensor_instance_name = sat_name + "|" + instr_name
                        entities.add(sensor_instance_name)
                        entities.add(target_name)
                        kg.append({
                            "head": sensor_instance_name,
                            "relationship": "inVisibilityOfTarget",
                            "tail": target_name
                        })

        # SENSORTYPE
        relations.add("SENSORTYPE")
        sensortype_relations = get_sensortype_relations(session)
        for relation in sensortype_relations:
            entities.add(relation["head"])
            entities.add(relation["tail"])
        kg.extend(sensortype_relations)

        # SENSORBAND
        relations.add("SENSORBAND")
        sensorband_relations = get_sensorband_relations(session)
        for relation in sensorband_relations:
            entities.add(relation["head"])
            entities.add(relation["tail"])
        kg.extend(sensorband_relations)

        # SENSORRULE
        relations.add("SENSORRULE")
        sensorrule_relations = get_sensorrule_relations(session)
        for relation in sensorrule_relations:
            entities.add(relation["head"])
            entities.add(relation["tail"])
        kg.extend(sensorrule_relations)

        # TYPEOBSERVES
        relations.add("TYPEOBSERVES")
        typeobserves_relations = get_typeobserves_relations(session)
        for relation in typeobserves_relations:
            entities.add(relation["head"])
            entities.add(relation["tail"])
        kg.extend(typeobserves_relations)

        relations.add("canParticipate")
        ground_truth = retrieve_available_satellites(mission_id, session)
        mission_info = get_mission_information(mission_id, session)

    # Print a file with a relation between entities and indices
    entities_dict_path = simulation_path / "entities.dict"
    inv_entity_dict = {}
    with entities_dict_path.open('w', encoding='utf8') as entities_dict_file:
        for idx, entity in enumerate(entities):
            entities_dict_file.write(f"{idx}\t{entity}\n")
            inv_entity_dict[entity] = idx

    # Print a file with a relation between predicates and indices
    relations_dict_path = simulation_path / "relations.dict"
    inv_relation_dict = {}
    with relations_dict_path.open('w', encoding='utf8') as relations_dict_file:
        for idx, relation in enumerate(relations):
            relations_dict_file.write(f"{idx}\t{relation}\n")
            inv_relation_dict[relation] = idx

    # Print the knowledge base into a file
    kg_path = simulation_path / "train.txt"
    kg_val_path = simulation_path / "valid.txt"
    validation_fraction = 0.1
    with kg_path.open('w', encoding='utf8') as kg_file, kg_val_path.open(
            'w', encoding='utf8') as kg_val_file:
        for fact in kg:
            line = f'{fact["head"]}\t{fact["relationship"]}\t{fact["tail"]}\n'
            # Every fact goes into the training file; roughly
            # validation_fraction of them are also copied to the validation file
            kg_file.write(line)
            if random.random() < validation_fraction:
                kg_val_file.write(line)

    # Print a file with the logic rules
    src_rules_path = Path("./knowledge_reasoning/MLN_rule.txt")
    dst_rules_path = simulation_path / "MLN_rule.txt"
    shutil.copy(src_rules_path, dst_rules_path)
    (simulation_path / "final_rules").mkdir(exist_ok=True)
    shutil.copy(Path("./knowledge_reasoning/fc_observation.txt"),
                simulation_path / "final_rules" / "fc_observation.txt")
    shutil.copy(Path("./knowledge_reasoning/fc_visibility.txt"),
                simulation_path / "final_rules" / "fc_visibility.txt")

    # Print a ground truth with the set of satellites we know have a chance of participating at all
    ground_truth_path = simulation_path / "test.txt"
    with ground_truth_path.open('w', encoding='utf8') as ground_truth_file:
        for satellite in ground_truth:
            ground_truth_file.write(
                f'{satellite["name"]}\tcanParticipate\t{mission_info["name"]}\n'
            )
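
retrieve_entity_dict (used in Example #11) takes the driver and is not shown here; independent of it, a minimal reader for the entities.dict file written above might look like this (hypothetical helper):

def load_entity_dict(path):
    # Parse the "<idx>\t<entity>" lines written by print_kg_reasoning_files
    entity_dict, inv_entity_dict = {}, {}
    with open(path, encoding='utf8') as dict_file:
        for line in dict_file:
            idx, entity = line.rstrip('\n').split('\t', 1)
            entity_dict[int(idx)] = entity
            inv_entity_dict[entity] = int(idx)
    return entity_dict, inv_entity_dict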
Example #13
def add_forest_fire_mission(location):
    driver = get_neo4j_driver()

    with driver.session() as session:
        # Count number of missions to get ID
        result = session.run('MATCH (m:Mission) RETURN count(m) as count')
        mission_count = result.single()[0]

        # Create a sample mission
        mission_id = mission_count + 1
        summary = session.run(
            'CREATE (m:Mission {mid: $mission_id, name: $name, description: $description})',
            mission_id=mission_id,
            name=f"Mission {mission_id} - Active Forest Fire Monitoring",
            description='').consume()
        print(summary.counters)

        # Add the observations that need to be measured
        now_time = datetime.now()
        month_time = now_time + timedelta(days=14)
        summary = session.run(
            'MATCH (op1:ObservableProperty), (op2:ObservableProperty), (op3:ObservableProperty), '
            '(op4:ObservableProperty), (m:Mission) '
            'WHERE op1.name = "Land surface temperature" AND op2.name = "Fire temperature" '
            'AND op3.name = "Cloud type" AND op4.name = "Trace gases (excluding ozone)" '
            #'AND op5.name = "Atmospheric Chemistry - SO2 (column/profile)" '
            'AND m.mid = $mission_id '
            'CREATE (o1:Observation {name: $name1, startDate: $start_date, endDate: $end_date, accuracy: $acc1}), '
            '(o2:Observation {name: $name2, startDate: $start_date, endDate: $end_date, accuracy: $acc2}), '
            '(o3:Observation {name: $name3, startDate: $start_date, endDate: $end_date, accuracy: $acc3}), '
            '(o4:Observation {name: $name4, startDate: $start_date, endDate: $end_date, accuracy: $acc4}), '
            #'(o5:Observation {name: $name5, startDate: $start_date, endDate: $end_date, accuracy: $acc5}), '
            '(m)-[:REQUIRES]->(o1), (m)-[:REQUIRES]->(o2), (m)-[:REQUIRES]->(o3), '
            '(m)-[:REQUIRES]->(o4), '
            #'(m)-[:REQUIRES]->(o5), '
            '(o1)-[:OBSERVEDPROPERTY]->(op1), (o2)-[:OBSERVEDPROPERTY]->(op2), '
            '(o3)-[:OBSERVEDPROPERTY]->(op3), (o4)-[:OBSERVEDPROPERTY]->(op4) ',
            #'(o5)-[:OBSERVEDPROPERTY]->(op5)',
            mission_id=mission_id,
            start_date=now_time,
            end_date=month_time,
            name1='M1 - Temperature (TIR)',
            acc1='1 K',
            name2='M1 - Temperature (SWIR)',
            acc2='1 K',
            name3='M1 - Smoke',
            acc3='10 % confidence',
            name4='M1 - CO Trace',
            acc4='10 cm',
            #name5='M1 - Volcano Gases',
            #acc5='0.1'
        ).consume()
        print(summary.counters)

        summary = session.run(
            'MATCH (m:Mission), (l:Location) '
            'WHERE m.mid = $mission_id AND l.name = $location '
            'CREATE (m)-[:HASLOCATION]->(l)',
            mission_id=mission_id,
            location=location).consume()

        print(summary.counters)

        return mission_id
Example #14
def display_simulation_results(simulation_probabilities):
    cdf_line = None
    cdf2_line = None

    plt.ion()
    figure = plt.figure(constrained_layout=True, figsize=(15, 8))
    widths = [1, 4, 1]
    heights = [2, 1]
    gs = figure.add_gridspec(ncols=3,
                             nrows=2,
                             width_ratios=widths,
                             height_ratios=heights)
    earth_axes = figure.add_subplot(gs[0, 1])
    earth_axes.set_title('Eruption locations and sizes')
    earth_axes.set_xlabel('Longitude (deg)')
    earth_axes.set_ylabel('Latitude (deg)')
    sim_info = figure.add_subplot(gs[0, 2])
    sim_info.axis('off')
    sim_text = sim_info.text(0.05,
                             0.95,
                             "",
                             transform=sim_info.transAxes,
                             fontsize=12,
                             verticalalignment='top')

    cdf_axes = figure.add_subplot(gs[1, 1])
    cdf_axes.set_title('Monte Carlo Results')
    cdf_axes.set_xlabel('Simulation number')
    cdf_axes.set_ylabel('Probability of mission success')
    cdf_info = figure.add_subplot(gs[1, 2])
    cdf_info.axis('off')
    cdf_text = cdf_info.text(0.05,
                             0.95,
                             "",
                             transform=cdf_info.transAxes,
                             fontsize=12,
                             verticalalignment='top')

    mng = plt.get_current_fig_manager()
    #mng.window.state('zoomed')  # works fine on Windows!
    plt.show()

    path = geopandas.datasets.get_path('naturalearth_lowres')
    earth_info = geopandas.read_file(path)
    earth_info.plot(ax=earth_axes, facecolor='none', edgecolor='black')

    simulations_path = Path("./int_files/simulations/")

    # Connect to database, open session
    driver = get_neo4j_driver()

    # Updates
    success_probs = []
    success_probs_bench = []
    x_axis = []

    for simulation_idx, folder in enumerate(
        [x for x in simulations_path.iterdir() if x.is_dir()]):
        simulation_path = folder / "simulation_information.json"
        with simulation_path.open("r") as simulation_file:
            simulation_info = json.load(simulation_file)

        with driver.session() as session:
            result = session.run(
                'MATCH (l:Location) '
                'WHERE l.name=$name RETURN DISTINCT l;',
                name=simulation_info["location"])
            record = result.single()
            location_info = {
                "name": record["l"]["name"],
                "latitude": record["l"]["latitude"],
                "longitude": record["l"]["longitude"]
            }

        earth_axes.add_artist(
            plt.Circle((location_info["longitude"], location_info["latitude"]),
                       simulation_info["size"] * 0.1 / 10000 * 300,
                       ec="red",
                       fill=True,
                       fc="orange"))

        sim_text.set_text(generate_simulation_text_info(simulation_info))

        # Compute probs for demo video
        success_probs_bench.append(0.2)
        success_probs_bench.sort()
        success_probs.append(
            simulation_probabilities["Full Pipeline"][simulation_idx])
        success_probs.sort()

        x_axis.append(simulation_idx)

        if cdf_line is None:
            cdf_line = cdf_axes.plot(x_axis,
                                     success_probs,
                                     marker='.',
                                     linestyle='',
                                     label="Full Pipeline")[0]
        cdf_line.set_data(x_axis, success_probs)

        if cdf2_line is None:
            cdf2_line = cdf_axes.plot(x_axis,
                                      success_probs_bench,
                                      color="red",
                                      label="Benchmark Team")[0]
        cdf2_line.set_data(x_axis, success_probs_bench)

        cdf_actualtext = '\n'.join([
            f"Full Pipeline: {np.mean(success_probs):.5f}",
            f"Benchmark Team: {np.mean(success_probs_bench):.5f}"
        ])
        cdf_text.set_text(cdf_actualtext)

        cdf_axes.legend()
        cdf_axes.relim()
        cdf_axes.autoscale_view()

        # Animation
        figure.canvas.draw_idle()
        figure.canvas.start_event_loop(0.0001)

    figure.canvas.start_event_loop(0)
Example #15
def main():
    simulation_path = Path('./int_files/simulations/simulation_0').resolve()
    simulation_info_path = simulation_path / 'simulation_information.json'
    with simulation_info_path.open() as simulation_info_file:
        simulation_info = json.load(simulation_info_file)
    # Method 1
    # Full process (UniKER - Sensing - Verification)
    location = simulation_info["location"]
    mission_id = simulation_info["mission_id"]
    access_intervals = read_access_times(location)
    # ["Sentinel-1 A", "Sentinel-1 B", "GOES-13", "GOES-14", "GOES-15", "GOES-16", "GOES-17", "Aqua", "Terra"]
    satellite_list = [
        "Sentinel-1 A", "Sentinel-1 B", "GOES-15", "GOES-17", "Aqua", "Terra"
    ]

    driver = get_neo4j_driver()
    with driver.session() as session:
        team = get_sensors_from_satellite_list(session, satellite_list)
    team = run_sensor_planner(team, simulation_info)
    team = construct_team_from_list(team)
    team_time = find_time_bounds(team, location, access_intervals)

    print(amy_team(team_time, "probabilities"))
    print(amy_team(team_time, "times"))

    entity_dict, inv_entity_dict = retrieve_entity_dict(driver)

    prefix_list = ['a', 's', 'm']
    a_prefix, s_prefix, m_prefix = prefix_list
    team_time_id = generate_team_time_id(entity_dict, team_time, a_prefix,
                                         s_prefix)

    a_list, s_list = generate_as_lists(team, entity_dict, a_prefix, s_prefix)
    m_list = generate_m_list(team,
                             simulation_path / "simulation_information.json",
                             entity_dict, prefix_list[2])
    num_asm = [len(a_list), len(s_list), len(m_list)]
    num_a, num_s, num_m = num_asm
    print('# of agents, sensors, meas: ', num_asm)

    check_time(team, team_time_id, m_list, entity_dict, s_prefix, m_prefix)

    # relationship matrices
    relation_as = construct_as_matrix(team, entity_dict, num_a, num_s,
                                      a_prefix, s_prefix, a_list, s_list)

    # modules for PRISM MDP
    all_states = all_states_as(num_a, num_s, relation_as, a_list, s_list,
                               team_time_id)
    num_states = len(all_states)  # total number of states

    prism_wsl = (os.environ.get("PRISM_WSL", "yes") == "yes")

    # name of files for PRISM (saved to current directory)
    mission_file = simulation_path / "prop1.txt"  # specification
    mdp_filename = "KG_MDP1.txt"  # MDP
    output_filename = "output1.txt"  # output log
    prism_path = Path(
        os.environ.get("PRISM_PATH", 'D:/Dropbox/darpa_grant/prism/prism/bin'))
    print(prism_path)
    mission_length = 14

    qout = mp.Queue()
    processes = [
        mp.Process(target=parallelize,
                   args=(team, team_time, entity_dict, inv_entity_dict,
                         mission_file, mdp_filename, output_filename,
                         simulation_path, prism_path, m_list, prefix_list, i,
                         qout, prism_wsl)) for i in range(mission_length)
    ]
    for p in processes:
        p.start()

    # Drain the queue before joining: a worker that still has buffered
    # results on the queue will not exit, so joining first can deadlock
    result = []
    teaming = []
    times = []
    for _ in processes:
        result_p, teaming_p, time_dict = qout.get()
        result.append(result_p)
        teaming.append(teaming_p)
        times.append(time_dict)

    for p in processes:
        p.join()

    # merge all teaming dictionaries into one
    teams = {k: v for d in teaming for k, v in d.items()}
    timestep_dict = {k: v for d in times for k, v in d.items()}

    optimal_teaming = pareto_plot_all(result, teams, timestep_dict)
    print('\n ===================== OPTIMAL TEAM ===================== ')
    print(optimal_teaming)
    print(result)
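
Since main spawns mp.Process workers, module-level code needs the standard entry-point guard (the spawn start method on Windows and macOS re-imports the module):

if __name__ == '__main__':
    main()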