def __init__(self, gp_params):
    """Initialize route-selection configuration from global parameters.

    :param gp_params: global namespace parameters created from input files
        (possibly with some small non-structural modifications to params).
        The name spaces here should trace up all the way to the input files.
    :type gp_params: dict
    :raises Exception: if the planning window is longer than the big-M time
        value (the MILP formulation would be unsound in that case)
    """
    # note: the original also grabbed orbit_prop_params['scenario_params'],
    # but never used it; dropped here.
    sat_params = gp_params['orbit_prop_params']['sat_params']
    rs_general_params = gp_params['gp_general_params'][
        'route_selection_general_params']
    rs_params = gp_params['gp_general_params']['route_selection_params_v1']
    gp_general_other_params = gp_params['gp_general_params']['other_params']
    gp_inst_planning_params = gp_params['gp_instance_params'][
        'route_selection_params']

    self.num_sats = sat_params['num_sats']
    self.num_paths = rs_params['num_paths']

    # planning (route selection) window boundaries, parsed from ISO strings
    self.sel_start_utc_dt = tt.iso_string_to_dt(
        gp_inst_planning_params['start_utc'])
    self.sel_end_utc_dt = tt.iso_string_to_dt(
        gp_inst_planning_params['end_utc'])

    # note: M values should be as low as possible to prevent numerical issues
    # (see: https://orinanobworld.blogspot.com/2011/07/perils-of-big-m.html)
    self.M_t_s = 86400  # 1 day
    self.M_dv_Mb = 1000000  # 1000 gigabits

    self.min_path_dv = rs_params['min_path_dv_Mb']
    self.solver_max_runtime = rs_params['solver_max_runtime_s']
    self.solver_name = rs_params['solver_name']
    self.solver_run_remotely = rs_params['solver_run_remotely']
    self.wind_filter_duration = timedelta(
        seconds=rs_general_params['wind_filter_duration_s'])
    self.latency_params = gp_general_other_params['latency_calculation']

    # quick sanity check on M time value: the scheduling window must fit
    # inside the big-M horizon or the formulation breaks down
    total_duration = (self.sel_end_utc_dt -
                      self.sel_start_utc_dt).total_seconds()
    if total_duration > self.M_t_s:
        raise Exception(
            'big M value is too small for %f second scheduling window' %
            (total_duration))
def updateSatState(sat_id, sat_state):
    """Merge a satellite's telemetry update into server state and globals.

    ``sat_state`` carries three history series, each ordered
    [oldest, ..., most recent]:
      - "power": list of SatStateEntry(update_dt, state_info={"batt_e_Wh": ...,
        "sat_id_prefix": ..., "sat_id": ...})
      - "data":  list of SatStateEntry(update_dt, state_info={"DS_state": ...})
      - "orbit": list of SatStateEntry(update_dt, state_info={"a_km", "e",
        "i_deg", "RAAN_deg", "arg_per_deg", "M_deg"})

    If the entries arrive as a pure JSON encoding, element [0] of each pair is
    an ISO time string and is converted back to a datetime here.

    Each incoming series is treated as the on-board truth: previously stored
    entries at or after the oldest incoming timestamp are dropped and the new
    entries are appended. An orbital-element change triggers re-propagation.
    """
    # TODO - validate expected formats
    if not isinstance(sat_state['power'][0][0], str):
        # already-deserialized SatStateEntry namedtuples
        powerState = sat_state['power']   # history to append before archiving (since last update)
        dataState = sat_state['data']     # modeling after power, less the extra sat ID & such
        orbitState = sat_state['orbit']   # list of SatStateEntry(update_dt, state_info=orbital elems)
        # attitudeState = sat_state['att']  # TODO: Attitude
    else:
        # if coming from a pure json encoding, convert string representation to datetimes
        powerState = [
            SatStateEntry(update_dt=tt.iso_string_to_dt(e[0]), state_info=e[1])
            for e in sat_state['power']
        ]
        dataState = [
            SatStateEntry(update_dt=tt.iso_string_to_dt(e[0]), state_info=e[1])
            for e in sat_state['data']
        ]
        orbitState = [
            SatStateEntry(update_dt=tt.iso_string_to_dt(e[0]), state_info=e[1])
            for e in sat_state['orbit']
        ]

    global server
    if len(powerState) > 0:
        # keep only archived entries strictly older than the incoming series,
        # then append the new series
        server.plan_db.sat_state_hist_by_id[sat_id] = [
            entry for entry in server.plan_db.sat_state_hist_by_id[sat_id]
            if entry.update_dt < powerState[0].update_dt
        ] + powerState

    global parameter_updates
    # BUGFIX: original read `'data_state_Mb' in parameter_updates[sat_id].keys() > 0`,
    # a chained comparison that evaluates `dict_keys > 0` (TypeError on Python 3)
    # whenever the membership test succeeds. Plain membership is what was meant.
    if len(dataState) > 0 and 'data_state_Mb' in parameter_updates[sat_id]:
        # consider update series the truth (on board); all previous entries
        # which fall after its oldest are removed, and the new entries appended
        parameter_updates[sat_id]['data_state_Mb'] = [
            entry for entry in parameter_updates[sat_id]['data_state_Mb']
            if entry.update_dt < dataState[0].update_dt
        ] + dataState

    global tmp_global_of_caseSpecificParams
    if len(orbitState) > 0:
        reprop = False
        # TODO - judge if new & old are substantially different enough to reprop
        if parameter_updates[sat_id]['kepler_meananom'] != orbitState[-1]:
            reprop = True
            parameter_updates[sat_id]['kepler_meananom'] = orbitState[-1]
            # Assumes sats are indv defined (per assertion during init)
            for elem in tmp_global_of_caseSpecificParams["orbit_prop_params"][
                    "orbit_params"]["sat_orbital_elems"]:
                if elem["sat_id"] == sat_id:
                    elem["kepler_meananom"] = parameter_updates[sat_id][
                        'kepler_meananom']
        if reprop:
            repropagateSats()
            recalculateLinkDatarates()
            updateWindows()
def loadFromLocalConfig(case_name="ONLINE_OPS"):
    """Load and assemble simulation parameters from local JSON config files.

    Reads the case-specific configs (scenario, constellation, GS network,
    ops profile), resolves the referenced satellite and payload reference
    models, merges in the general sim config, and builds the nested ``params``
    dict expected by downstream consumers (runner_gp.py among others).

    :param case_name: name of the case directory under PATH_TO_INPUTS/cases;
        "ONLINE_OPS" additionally re-anchors the scenario time range to "now"
    :return: assembled parameter dict
    """
    sim_case_config_FILENAME = PATH_TO_INPUTS + '/cases/' + case_name + '/sim_case_config.json'
    constellation_config_FILENAME = PATH_TO_INPUTS + '/cases/' + case_name + '/constellation_config.json'
    gs_network_config_FILENAME = PATH_TO_INPUTS + '/cases/' + case_name + '/ground_station_network_config.json'
    opsProfile_config_FILENAME = PATH_TO_INPUTS + '/cases/' + case_name + '/operational_profile_config.json'
    sim_gen_config_FILENAME = PATH_TO_INPUTS + '/general_config/sim_general_config.json'

    # -------- CASE SPECIFIC CONFIGURATION INPUTS -------- #
    with open(sim_case_config_FILENAME, 'r') as f:
        sim_case_config = json.load(f)
    with open(constellation_config_FILENAME, 'r') as f:
        constellation_config = json.load(f)
    with open(gs_network_config_FILENAME, 'r') as f:
        gs_network_config = json.load(f)
    with open(opsProfile_config_FILENAME, 'r') as f:
        opsProfile_config = json.load(f)

    # the constellation config names the satellite reference model to load...
    sat_ref_model_FILENAME = PATH_TO_INPUTS + '/reference_model_definitions/sat_refs/' + \
        constellation_config["constellation_definition"]["default_sat_ref_model_name"] + '.json'
    with open(sat_ref_model_FILENAME, 'r') as f:
        sat_ref_model = json.load(f)

    # ...and the sat model in turn names the payload reference model
    payload_ref_model_FILENAME = PATH_TO_INPUTS + '/reference_model_definitions/payload_refs/' + \
        sat_ref_model["sat_model_definition"]["default_payload_ref_model_name"] + '.json'
    with open(payload_ref_model_FILENAME, 'r') as f:
        payload_ref_model = json.load(f)

    # -------- General Config ------ #
    with open(sim_gen_config_FILENAME, 'r') as f:  # move to constructor
        sim_gen_config = json.load(f)

    # Peel off outer layer(s)
    scenario_params = sim_case_config['scenario_params']
    general_sim_params = sim_gen_config['general_sim_params']
    ops_profile_params = opsProfile_config['ops_profile_params']
    constellation_params = constellation_config["constellation_definition"][
        'constellation_params']
    gs_network_definition = gs_network_config['network_definition']
    sat_model_definition = sat_ref_model['sat_model_definition']
    payload_params = payload_ref_model['payload_model_definition'][
        'payload_params']

    ######### ---------- Build orbit_prop_params ---------- #########
    # sat-model power params plus the payload's "obs" power draw, enumerated
    # over the constellation's sat IDs
    append_power_params_with_enumeration = {
        "power_consumption_W": {
            **sat_model_definition['sat_model_params']['power_params']["power_consumption_W"],
            "obs": payload_params["power_consumption_W"]["obs"]
        },
        "battery_storage_Wh":
        sat_model_definition['sat_model_params']['power_params']
        ["battery_storage_Wh"],
        'sat_ids': constellation_params['sat_ids'],
        'sat_id_prefix': constellation_params['sat_id_prefix'],
    }
    append_data_params_with_enumeration = {
        **sat_model_definition['sat_model_params']['data_storage_params'],
        'sat_ids': constellation_params['sat_ids'],
        'sat_id_prefix': constellation_params['sat_id_prefix'],
    }
    append_state_params_with_enumeration = {
        **sat_model_definition['sat_model_params']['initial_state'],
        'sat_ids': constellation_params['sat_ids'],
        'sat_id_prefix': constellation_params['sat_id_prefix'],
    }
    power_params_by_sat_id, all_sat_ids1 = io_tools.unpack_sat_entry_list(
        [append_power_params_with_enumeration], output_format='dict')
    data_storage_params_by_sat_id, all_sat_ids2 = io_tools.unpack_sat_entry_list(
        [append_data_params_with_enumeration], output_format='dict')
    initial_state_by_sat_id, all_sat_ids3 = io_tools.unpack_sat_entry_list(
        [append_state_params_with_enumeration], output_format='dict')

    if case_name == "ONLINE_OPS":
        # BUGFIX: capture "now" once so start and end are anchored to the same
        # instant (the original called datetime.now() twice, so the window was
        # slightly longer than horizon_len_m)
        now = datetime.now()
        scenario_params['start_utc'] = tt.date_string(now)
        scenario_params['end_utc'] = tt.date_string(now + timedelta(
            minutes=horizon_len_m))
        print("Time range: {} -> {}".format(scenario_params['start_utc'],
                                            scenario_params['end_utc']))

    orbit_prop_params = {
        'scenario_params': {
            'start_utc': scenario_params[
                'start_utc'],  # These duplications accomodate runner_gp.py expectations
            'end_utc': scenario_params[
                'end_utc'],  # TODO - update runner_gp.py to expect non-duplicated input
            'start_utc_dt': tt.iso_string_to_dt(scenario_params['start_utc']),
            'end_utc_dt': tt.iso_string_to_dt(scenario_params['end_utc']),
            'timestep_s': general_sim_params["timestep_s"]
        },
        'sat_params': {
            'num_sats': constellation_params['num_satellites'],
            'num_satellites': constellation_params[
                'num_satellites'],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'sat_id_order': constellation_params['sat_id_order'],
            'sat_id_prefix': constellation_params['sat_id_prefix'],
            'pl_data_rate': payload_params['payload_data_rate_Mbps'],
            'payload_data_rate_Mbps': payload_params[
                'payload_data_rate_Mbps'],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'power_params_by_sat_id': power_params_by_sat_id,
            'power_params': [
                append_power_params_with_enumeration
            ],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'data_storage_params_by_sat_id': data_storage_params_by_sat_id,
            'data_storage_params': [
                append_data_params_with_enumeration
            ],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'initial_state_by_sat_id': initial_state_by_sat_id,
            'initial_state': [
                append_state_params_with_enumeration
            ],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'activity_params': {
                **sat_model_definition['sat_model_params']["activity_params"],
                # payload minimum durations are defaults; sat-model values win
                "min_duration_s": {
                    **payload_params["min_duration_s"],
                    **sat_model_definition['sat_model_params']["activity_params"]["min_duration_s"]
                },
                "intra-orbit_neighbor_direction_method":
                constellation_params["intra-orbit_neighbor_direction_method"]
            }
        },
        'gs_params': {
            'gs_network_name':
            gs_network_definition["gs_net_params"]['gs_network_name'],
            'num_stations':
            gs_network_definition["gs_net_params"]["num_stations"],
            'num_gs': gs_network_definition["gs_net_params"]
            ["num_stations"],  # TODO: LOL are you serious right now. Get rid of this duplication.
            'stations': gs_network_definition["gs_net_params"]["stations"]
        },
        'obs_params': ops_profile_params['obs_params'],
        'orbit_params': {
            'sat_ids_by_orbit_name':
            io_tools.expand_orbits_list(constellation_params['orbit_params'],
                                        constellation_params['sat_id_prefix']),
            'sat_orbital_elems':
            constellation_params['orbit_params']['sat_orbital_elems']
        },
        'orbit_prop_data': None  # orbit_prop_data # now from repropagateSats()
    }

    # make the satellite ID order. if the input ID order is default, then will
    # assume that the order is the same as all of the IDs found in the power parameters
    orbit_prop_params['sat_params'][
        'sat_id_order'] = io_tools.make_and_validate_sat_id_order(  # Pay close attention to this, because this is a mutated copy
            orbit_prop_params['sat_params']['sat_id_order'],
            orbit_prop_params['sat_params']['sat_id_prefix'],
            orbit_prop_params['sat_params']['num_sats'], all_sat_ids1)
    io_tools.validate_ids(
        validator=orbit_prop_params['sat_params']['sat_id_order'],
        validatee=all_sat_ids1)
    io_tools.validate_ids(
        validator=orbit_prop_params['sat_params']['sat_id_order'],
        validatee=all_sat_ids2)
    io_tools.validate_ids(
        validator=orbit_prop_params['sat_params']['sat_id_order'],
        validatee=all_sat_ids3)

    gs_id_order = io_tools.make_and_validate_gs_id_order(
        orbit_prop_params['gs_params'])
    orbit_prop_params['gs_params']['gs_id_order'] = gs_id_order
    orbit_prop_params['gs_params'][
        'elevation_cutoff_deg'] = gs_network_definition["gs_net_params"][
            "elevation_cutoff_deg"]

    obs_target_id_order = io_tools.make_and_validate_target_id_order(
        orbit_prop_params['obs_params'])
    orbit_prop_params['obs_params'][
        'obs_target_id_order'] = obs_target_id_order
    ######### ---------- End Build orbit_prop_params ---------- #########

    params = {
        'sim_case_config': {
            'scenario_params': scenario_params,  # simulation.params['sim_case_config'],
            'sim_gsn_ID': 'gsn'  # self.sim_gsn.ID
        },
        'orbit_prop_params': orbit_prop_params,  # simulation.params['orbit_prop_params'],
        'orbit_link_params': {
            "link_disables": ops_profile_params["link_disables"],
            "general_link_params": general_sim_params["general_link_params"]
        },
        'data_rates_params': None,  # simulation.params['data_rates_params'], # now from recalculateLinkDatarates()
        'sim_run_params': {
            "start_utc_dt": tt.iso_string_to_dt(scenario_params['start_utc']),
            "end_utc_dt": tt.iso_string_to_dt(scenario_params['end_utc'])
        },
        'sat_config': {
            'sat_model_definition': sat_model_definition  # simulation.params['sat_config'],
            # 'sat_model_definition' : data['sat_ref_model']['sat_model_definition']
        },
        'sim_gen_config': general_sim_params,  # simulation.params['sim_gen_config'],
        'gp_general_params': tmp_global_of_caseSpecificParams[
            'gp_general_params']  # carrying over from initial setup
    }
    return params
from circinus_tools.plotting import plot_tools as pltl
from circinus_tools.scheduling.custom_window import ObsWindow
from circinus_tools import time_tools as tt

# Load injected observation windows from a JSON dump and bin them per
# satellite index for plotting.
# NOTE(review): `sats`, `day_start`, `day_end`, and `json` are assumed to be
# defined earlier in this script — confirm against the full file.
with open('injects_zhou.json', 'r') as f:
    the_json = json.load(f)

# one bucket of ObsWindows per satellite, indexed like `sats`
all_obs = [[] for _ in range(len(sats))]
for injected in the_json:
    # skip injects for satellites not in the plotted set
    if injected['sat_id'] not in sats:
        continue
    windid = injected['indx']
    sat_indx = sats.index(injected['sat_id'])
    start = tt.iso_string_to_dt(injected['start_utc'])
    end = tt.iso_string_to_dt(injected['end_utc'])
    obs = ObsWindow(
        windid, sat_indx, ['blah'], 0, start, end, wind_obj_type='injected')
    all_obs[sat_indx].append(obs)

# plotting window setup (no routes overlaid)
plot_params = {}
plot_params['route_ids_by_wind'] = None
plot_params['plot_start_dt'] = day_start
plot_params['plot_end_dt'] = day_end
plot_params['base_time_dt'] = day_start
def run(self, data, params):
    """
    Run orbit propagation pipeline element using the inputs supplied per
    input.json schema. Formats the high level output json and calls various
    subcomponents for processing

    :param data: dict of input json payloads: 'orbit_prop_data',
        'orbit_prop_inputs', 'viz_params', 'sat_link_history', 'gp_history',
        'display_link_info'
    :param params: dict with 'history_input_option' selecting which history
        source consolidate_viz_data uses
    :return: tuple of (czml output, viz objects, renderer description)
    """
    orbit_prop_data = data['orbit_prop_data']
    orbit_prop_inputs = data['orbit_prop_inputs']
    viz_params = data['viz_params']
    sat_link_history = data['sat_link_history']
    gp_history = data['gp_history']
    display_link_info = data['display_link_info']
    history_input_option = params['history_input_option']

    # accumulates all CZML packets (ground stations, targets, sats, links)
    cz = CzmlWrapper()

    satellite_callbacks = None
    renderers_list = None
    renderer_mapping = None
    # only viz_params schema version 0.1 is supported
    if viz_params['version'] == "0.1":
        orbit_time_precision = viz_params['orbit_time_precision_s']
        satellite_callbacks = viz_params['satellite_callbacks']
        renderers_list = [
            rndr for rndr in viz_params['available_renderers'].values()
        ]
        renderer_mapping = viz_params['selected_renderer_mapping']
    else:
        raise NotImplementedError

    # initializing these here so they will be in scope for use down below
    num_sats = None
    num_gs = None
    num_targets = None
    gs_names = None
    start_utc_dt = None
    end_utc_dt = None
    sat_id_order = None
    gs_id_order = None

    # only orbit_prop_inputs schema version 0.8 is supported
    if orbit_prop_inputs['version'] == "0.8":
        scenario_params = orbit_prop_inputs['scenario_params']
        start_utc_dt = tt.iso_string_to_dt(scenario_params['start_utc'])
        end_utc_dt = tt.iso_string_to_dt(scenario_params['end_utc'])
        num_sats = orbit_prop_inputs['sat_params']['num_satellites']
        num_gs = orbit_prop_inputs['gs_params']['num_stations']
        num_targets = orbit_prop_inputs['obs_params']['num_targets']

        # add ground stations to czml file
        gs_params = orbit_prop_inputs['gs_params']
        gs_names = [gs['name'] for gs in gs_params['stations']]
        num_satellites = orbit_prop_inputs['sat_params']['num_satellites']
        sat_id_prefix = orbit_prop_inputs['sat_params']['sat_id_prefix']
        sat_id_order = orbit_prop_inputs['sat_params']['sat_id_order']

        # in the case that this is default, then we need to grab a list of all
        # the satellite IDs. We'll take this from all of the satellite IDs
        # found in the orbit parameters
        if sat_id_order == 'default':
            dummy, all_sat_ids = io_tools.unpack_sat_entry_list(
                orbit_prop_inputs['orbit_params']['sat_orbital_elems'],
                force_duplicate=True)
            # make the satellite ID order. if the input ID order is default,
            # then will assume that the order is the same as all of the IDs
            # passed as argument
            # NOTE(review): nesting reconstructed so all_sat_ids is defined
            # where used — confirm against upstream history
            sat_id_order = io_tools.make_and_validate_sat_id_order(
                sat_id_order, sat_id_prefix, num_satellites, all_sat_ids)
            io_tools.validate_ids(
                validator=sat_id_order, validatee=all_sat_ids)

        gs_id_order = io_tools.make_and_validate_gs_id_order(
            orbit_prop_inputs['gs_params'])

        for station in gs_params['stations']:
            cz.make_gs(
                gs_id=station['id'],
                name=station['name'],
                name_pretty=station['name_pretty'],
                start_utc=scenario_params['start_utc'],
                end_utc=scenario_params['end_utc'],
                lat_deg=station['latitude_deg'],
                lon_deg=station['longitude_deg'],
                h_m=station['height_m'])

        # add observation targets to czml file
        obs_params = orbit_prop_inputs['obs_params']
        for targ in obs_params['targets']:
            cz.make_obs_target(
                targ_id=targ['id'],
                name=targ['name'],
                name_pretty=targ['name_pretty'],
                start_utc=scenario_params['start_utc'],
                end_utc=scenario_params['end_utc'],
                lat_deg=targ['latitude_deg'],
                lon_deg=targ['longitude_deg'],
                h_m=targ['height_m'])
    else:
        raise NotImplementedError

    # only orbit_prop_data schema version 0.3 is supported
    if orbit_prop_data['version'] == "0.3":
        scenario_params = orbit_prop_data['scenario_params']
        sat_orbit_data = orbit_prop_data['sat_orbit_data']

        # add czml file document header
        name = "sats_file_" + datetime.utcnow().isoformat()
        cz.make_doc_header(name, scenario_params['start_utc'],
                           scenario_params['end_utc'])

        # add satellites to czml file
        for sat_indx, elem in enumerate(sat_orbit_data):
            cz.make_sat(
                sat_id=elem['sat_id'],
                name=elem['sat_id'],
                name_pretty=elem['sat_id'],
                start_utc=scenario_params['start_utc'],
                end_utc=scenario_params['end_utc'],
                orbit_t_r=elem['time_s_pos_eci_km'],
                orbit_epoch=scenario_params['start_utc'],
                orbit_time_precision=orbit_time_precision,
                orbit_pos_units_mult=1000,  # positions arrive in km; CZML wants meters
                callbacks=satellite_callbacks)
    else:
        raise NotImplementedError

    # now make all the cesium outputs
    print(history_input_option)  # NOTE(review): looks like a leftover debug print
    viz_data = self.consolidate_viz_data(
        sat_link_history, gp_history, option=history_input_option)

    cz.json_metadata['input_data_updated'] = viz_data.get('update_time', None)

    cz.make_downlinks(viz_data['dlnk_times_flat'], viz_data['dlnk_partners'],
                      num_sats, num_gs, gs_names, sat_id_order, gs_id_order,
                      start_utc_dt, end_utc_dt)

    cz.make_crosslinks(viz_data['xlnk_times_flat'], viz_data['xlnk_partners'],
                       num_sats, sat_id_order, start_utc_dt, end_utc_dt)

    cz.make_observations(viz_data['obs_times_flat'], num_sats, sat_id_order,
                         start_utc_dt, end_utc_dt)

    dlnk_rate_history_epoch_dt = tt.iso_string_to_dt(
        viz_data['dlnk_rate_history_epoch'])
    cz.make_downlink_rates(viz_data['dlnk_rate_history'],
                           dlnk_rate_history_epoch_dt, num_sats, num_gs,
                           sat_id_order, gs_id_order, end_utc_dt)

    # per-link text info is optional in viz_data and gated by the caller flag
    if viz_data.get('dlnk_link_info_history_flat') and display_link_info:
        cz.make_downlink_link_info(viz_data['dlnk_link_info_history_flat'],
                                   viz_data['dlnk_partners'], num_sats, num_gs,
                                   sat_id_order, gs_id_order, start_utc_dt,
                                   end_utc_dt)

    xlnk_rate_history_epoch_dt = tt.iso_string_to_dt(
        viz_data['xlnk_rate_history_epoch'])
    cz.make_crosslink_rates(viz_data['xlnk_rate_history'],
                            xlnk_rate_history_epoch_dt, num_sats, sat_id_order,
                            end_utc_dt)

    if viz_data.get('xlnk_link_info_history_flat') and display_link_info:
        cz.make_crosslink_link_info(viz_data['xlnk_link_info_history_flat'],
                                    viz_data['xlnk_partners'], num_sats,
                                    sat_id_order, start_utc_dt, end_utc_dt)

    renderer_description = cz.get_renderer_description(
        renderers_list,
        renderer_mapping,
        num_sats,
        num_gs,
        sat_id_order,
        gs_id_order,
        viz_data['dlnk_rate_history'],
        viz_data['xlnk_rate_history'],
        viz_data.get('dlnk_link_info_history_flat', []),
        viz_data['dlnk_partners'],
        viz_data.get('xlnk_link_info_history_flat', []),
        viz_data['xlnk_partners'],
    )

    return cz.get_czml(), cz.get_viz_objects(), renderer_description
def run(self, data):
    """Assemble constellation-sim parameters from pre-loaded configs and run
    the simulation.

    :param data: dict of parsed config payloads ('sim_case_config',
        'constellation_config', 'gs_network_config', 'gs_ref_model',
        'sat_ref_model', 'payload_ref_model', 'opsProfile_config',
        'sim_gen_config', 'gp_general_params', 'lp_general_params',
        'data_rates_inputs', 'orbit_prop_data',
        'restore_pickle_cmdline_arg', 'output_path')
    :return: output json dict with 'version' and 'update_time'
    """
    # Here we're stripping everything off down to whichever layer is relevant
    # from hereon out
    sim_case_config = data['sim_case_config']['scenario_params']
    constellation_config = data['constellation_config'][
        'constellation_definition']['constellation_params']
    gs_network_config = data['gs_network_config']["network_definition"]
    # NOTE(review): gs_ref_model is unpacked but never read below
    gs_ref_model = data['gs_ref_model']['gs_model_definition']
    sat_ref_model = data['sat_ref_model']['sat_model_definition']
    payload_ref_model = data['payload_ref_model'][
        'payload_model_definition']['payload_params']
    opsProfile_config = data['opsProfile_config']['ops_profile_params']
    sim_gen_config = data['sim_gen_config']["general_sim_params"]
    gp_general_params = data['gp_general_params']
    lp_general_params = data['lp_general_params']
    data_rates_params = data['data_rates_inputs']
    orbit_prop_data = data['orbit_prop_data']

    # TODO - because we split this apart in our new specification, we have to
    # join part of it back together here. This will be reformed as needed.
    # sat-model power params plus the payload's "obs" power draw, enumerated
    # over the constellation's sat IDs
    append_power_params_with_enumeration = {
        "power_consumption_W": {
            **sat_ref_model['sat_model_params']['power_params']["power_consumption_W"],
            "obs": payload_ref_model["power_consumption_W"]["obs"]
        },
        "battery_storage_Wh":
        sat_ref_model['sat_model_params']['power_params']
        ["battery_storage_Wh"],
        'sat_ids': constellation_config['sat_ids'],
        'sat_id_prefix': constellation_config['sat_id_prefix'],
    }
    append_data_params_with_enumeration = {
        **sat_ref_model['sat_model_params']['data_storage_params'],
        'sat_ids': constellation_config['sat_ids'],
        'sat_id_prefix': constellation_config['sat_id_prefix'],
    }
    append_state_params_with_enumeration = {
        **sat_ref_model['sat_model_params']['initial_state'],
        'sat_ids': constellation_config['sat_ids'],
        'sat_id_prefix': constellation_config['sat_id_prefix'],
    }
    power_params_by_sat_id, all_sat_ids1 = io_tools.unpack_sat_entry_list(
        [append_power_params_with_enumeration], output_format='dict')
    data_storage_params_by_sat_id, all_sat_ids2 = io_tools.unpack_sat_entry_list(
        [append_data_params_with_enumeration], output_format='dict')
    initial_state_by_sat_id, all_sat_ids3 = io_tools.unpack_sat_entry_list(
        [append_state_params_with_enumeration], output_format='dict')

    orbit_prop_inputs = {
        'scenario_params': {
            'start_utc': sim_case_config[
                'start_utc'],  # These duplications accomodate runner_gp.py expectations
            'end_utc': sim_case_config[
                'end_utc'],  # TODO - update runner_gp.py to expect non-duplicated input
            'start_utc_dt': tt.iso_string_to_dt(sim_case_config['start_utc']),
            'end_utc_dt': tt.iso_string_to_dt(sim_case_config['end_utc']),
            'timestep_s': sim_gen_config["timestep_s"]
        },
        'sat_params': {
            'num_sats': constellation_config['num_satellites'],
            'num_satellites': constellation_config[
                'num_satellites'],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'sat_id_order': constellation_config['sat_id_order'],
            'sat_id_prefix': constellation_config['sat_id_prefix'],
            'pl_data_rate': payload_ref_model['payload_data_rate_Mbps'],
            'payload_data_rate_Mbps': payload_ref_model[
                'payload_data_rate_Mbps'],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'power_params_by_sat_id': power_params_by_sat_id,
            'power_params': [
                append_power_params_with_enumeration
            ],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'data_storage_params_by_sat_id': data_storage_params_by_sat_id,
            'data_storage_params': [
                append_data_params_with_enumeration
            ],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'initial_state_by_sat_id': initial_state_by_sat_id,
            'initial_state': [
                append_state_params_with_enumeration
            ],  # Duplication to accomodate downstream (runner_gp.py among others) -- TODO: cut out duplication
            'activity_params': {
                **sat_ref_model['sat_model_params']["activity_params"],
                # payload minimum durations are defaults; sat-model values win
                "min_duration_s": {
                    **payload_ref_model["min_duration_s"],
                    **sat_ref_model['sat_model_params']["activity_params"]["min_duration_s"]
                },
                "intra-orbit_neighbor_direction_method":
                constellation_config["intra-orbit_neighbor_direction_method"]
            }
        },
        'gs_params': {
            'gs_network_name':
            gs_network_config["gs_net_params"]['gs_network_name'],
            'num_stations':
            gs_network_config["gs_net_params"]["num_stations"],
            'num_gs': gs_network_config["gs_net_params"]
            ["num_stations"],  # TODO: LOL are you serious right now. Get rid of this duplication.
            'stations': gs_network_config["gs_net_params"]["stations"]
        },
        'obs_params': opsProfile_config['obs_params'],
        'orbit_params': {
            'sat_ids_by_orbit_name':
            io_tools.expand_orbits_list(
                constellation_config['orbit_params'],
                constellation_config['sat_id_prefix']),
            'sat_orbital_elems':
            constellation_config['orbit_params']['sat_orbital_elems']
        },
        'orbit_prop_data': orbit_prop_data
    }

    # make the satellite ID order. if the input ID order is default, then will
    # assume that the order is the same as all of the IDs found in the power
    # parameters
    orbit_prop_inputs['sat_params'][
        'sat_id_order'] = io_tools.make_and_validate_sat_id_order(  # Pay close attention to this, because this is a mutated copy
            orbit_prop_inputs['sat_params']['sat_id_order'],
            orbit_prop_inputs['sat_params']['sat_id_prefix'],
            orbit_prop_inputs['sat_params']['num_sats'], all_sat_ids1)
    io_tools.validate_ids(
        validator=orbit_prop_inputs['sat_params']['sat_id_order'],
        validatee=all_sat_ids1)
    io_tools.validate_ids(
        validator=orbit_prop_inputs['sat_params']['sat_id_order'],
        validatee=all_sat_ids2)
    io_tools.validate_ids(
        validator=orbit_prop_inputs['sat_params']['sat_id_order'],
        validatee=all_sat_ids3)

    gs_id_order = io_tools.make_and_validate_gs_id_order(
        orbit_prop_inputs['gs_params'])
    orbit_prop_inputs['gs_params']['gs_id_order'] = gs_id_order

    obs_target_id_order = io_tools.make_and_validate_target_id_order(
        orbit_prop_inputs['obs_params'])
    orbit_prop_inputs['obs_params'][
        'obs_target_id_order'] = obs_target_id_order

    # top-level parameter bundle handed to ConstellationSim
    sim_params = {}
    sim_params['start_utc_dt'] = tt.iso_string_to_dt(
        sim_case_config['start_utc'])
    sim_params['end_utc_dt'] = tt.iso_string_to_dt(
        sim_case_config['end_utc'])
    sim_params['orbit_prop_params'] = orbit_prop_inputs
    sim_params[
        'orbit_link_params'] = {  #orbit_link_params TODO: This restructuring is totally gross, but doing it to avoid changing constellation_sim.py substantiall right now
            "link_disables": opsProfile_config["link_disables"],
            "general_link_params": sim_gen_config["general_link_params"]
        }
    sim_params['sim_case_config'] = sim_case_config
    sim_params[
        'gp_general_params'] = gp_general_params  #TODO: Would prefer to make this live in the GP only, as that's what makes sense. If it's used outside of the GP itself, shouldn't be called this & should have a spot in another file.
    sim_params['data_rates_params'] = data_rates_params
    sim_params['sim_gen_config'] = sim_gen_config
    sim_params['restore_pickle_cmdline_arg'] = data[
        'restore_pickle_cmdline_arg']
    sim_params['output_path'] = data['output_path']
    sim_params['const_sim_inst_params'] = {  # TODO: This restructuring is totally gross, but doing it to avoid changing constellation_sim.py directly on this iteration
        "sim_gs_network_params": gs_network_config["sim_gs_network_params"],
        "sim_gs_params": {
            **gs_network_config["gs_net_params"],
            "time_epsilon_s": sim_params['sim_gen_config']["gs_time_epsilon_s"]
        },
        "sim_satellite_params": sat_ref_model["sim_satellite_params"],
        "sim_plot_params": {
            **sim_gen_config["sim_plot_params"],
            'start_utc_dt': sim_params[
                'start_utc_dt'],  # TODO - copies of start_utc_dt/end_utc_dt are the most egregious offenders
            'end_utc_dt': sim_params['end_utc_dt']
        },
        "sim_metrics_params": sim_gen_config["sim_metrics_params"],
        "sim_run_params": {
            **sim_gen_config["sim_run_params"],
            "sim_tick_s": sim_gen_config[
                "timestep_s"],  # TODO: This renaming is totally gross, but doing it to avoid changing constellation_sim.py directly on this iteration
            "start_utc_dt": sim_params['start_utc_dt'],  # TODO - More copy stripping
            "end_utc_dt": sim_params['end_utc_dt']
        },
        "sim_run_perturbations": sim_case_config['sim_run_perturbations'],
        "gp_wrapper_params": gp_general_params["gp_wrapper_params"],
        "lp_wrapper_params": lp_general_params["lp_wrapper_params"],
        "lp_general_params": lp_general_params[
            "lp_general_params"]  # TODO - normalize this & the gp above, which isn't within this structure
    }
    sim_params["sat_config"] = {
        'sat_model_definition': data['sat_ref_model']['sat_model_definition']
    }

    # run the constellation simulation to completion, then post-process
    sim_runner = const_sim.ConstellationSim(sim_params)
    sim_runner.run()
    # NOTE(review): post_run's return value is captured but unused here
    output = sim_runner.post_run(data['output_path'])

    # define orbit prop outputs json
    # todo: add output here
    output_json = {}
    output_json['version'] = OUTPUT_JSON_VER
    output_json['update_time'] = datetime.utcnow().isoformat()
    return output_json
def run_gp(self,curr_time_dt,existing_rt_conts,gp_agent_ID,latest_gp_route_indx,sat_state_by_id, all_windows_dict, update_on_run=None):
    """Run one global planner (GP) instance and convert its output into sim route containers.

    Note: the GP is the only place in the whole sim, currently, where scheduled data volume
    attributes for data routes and windows are allowed to be updated.

    :param curr_time_dt: current sim time; all planning horizons are offsets from this
    :type curr_time_dt: datetime
    :param existing_rt_conts: existing Sim route containers known to the caller
    :param gp_agent_ID: ID of the agent on whose behalf the GP is run
    :param latest_gp_route_indx: starting unique index for any new routes the GP creates
    :param sat_state_by_id: per-satellite state dicts keyed by sat ID; each must contain 'batt_e_Wh'
    :param all_windows_dict: all activity windows (from gsn), filtered below to the planning horizons
    :param update_on_run: optional params dict used to (re)configure self before running; is None in
        the default sim, where everything is set up fully in the constructor
    :returns: (sim_routes, latest_gp_route_indx) - new SimRouteContainer list and updated route index
    :raises RuntimeWarning: if called at/after sim end, or if the GP output version is unexpected
    """

    if update_on_run is not None:  # is None in default sim, where it is set up fully in the constructor
        self.orbit_prop_params = update_on_run['orbit_prop_params']
        self.orbit_link_params = update_on_run['orbit_link_params']
        self.data_rates_params = update_on_run['data_rates_params']
        self.sim_end_utc_dt = update_on_run['sim_run_params']['end_utc_dt']

    def get_inp_time(time_dt, param_mins):
        # horizon time = base time + offset in minutes.
        # fix: previously ignored time_dt and always used closure variable curr_time_dt;
        # behavior is unchanged because every call site passes curr_time_dt.
        return time_dt + timedelta(minutes=param_mins)

    ##############################
    # set up GP inputs

    sats_state = [{"sat_id":sat_id,"batt_e_Wh":sat_state['batt_e_Wh']} for sat_id,sat_state in sat_state_by_id.items()]

    if curr_time_dt >= self.sim_end_utc_dt:
        raise RuntimeWarning('should not be running GP after end of sim')

    # each planning horizon is clipped to the sim end so the GP never plans past it
    gp_instance_params = {
        "version": "0.7",
        "planning_params": {
            "planning_start" : datetime_to_iso8601(get_inp_time(curr_time_dt,self.gp_params['planning_past_horizon_mins'])),
            "planning_fixed_end" : datetime_to_iso8601(min(self.sim_end_utc_dt,get_inp_time(curr_time_dt,self.gp_params['planning_horizon_fixed_mins']))),
            "planning_end_obs" : datetime_to_iso8601(min(self.sim_end_utc_dt,get_inp_time(curr_time_dt,self.gp_params['planning_horizon_obs_mins']))),
            "planning_end_xlnk" : datetime_to_iso8601(min(self.sim_end_utc_dt,get_inp_time(curr_time_dt,self.gp_params['planning_horizon_xlnk_mins']))),
            "planning_end_dlnk" : datetime_to_iso8601(min(self.sim_end_utc_dt,get_inp_time(curr_time_dt,self.gp_params['planning_horizon_dlnk_mins']))),
            "max_num_dlnks_allowed_after_planning_end_xlnk": self.gp_params['max_num_dlnks_allowed_after_planning_end_xlnk']
        },
        "activity_scheduling_params": {
            "plot_activity_scheduling_results": False
        },
        "gp_agent_ID": gp_agent_ID,
        "initial_gp_route_indx": latest_gp_route_indx,
        "sats_state": sats_state
    }

    esrcs = existing_rt_conts
    esrcs_by_id = {rt_cont.ID:rt_cont for rt_cont in existing_rt_conts}
    existing_route_data = {}
    existing_routes = [dmr for esrc in esrcs for dmr in esrc.get_routes()]

    # we need to copy all of the existing routes here, because we update the schedule data volume
    # attributes for routes and windows in the global planner. if we don't copy the routes, then we
    # will be modifying the data route objects that satellites have in their Sim route containers
    # (and effectively propagating information instantaneously to the satellites - double plus
    # ungood!).
    # We don't deepcopy because we don't want to make copies of the (many!) window objects contained
    # within the routes. This is just to save memory, not because we are dependent on the objects
    # staying the same (note: this is at least the intent - any behavior not conforming is a bug,
    # as of June 3 2018)
    existing_routes_copy = []
    for existing_route in existing_routes:
        existing_routes_copy.append(copy(existing_route))
    existing_route_data['existing_routes'] = existing_routes_copy

    # utilization by DMR ID. We use data volume utilization here, but for current version of global
    # planner this should be the same as time utilization
    existing_route_data['utilization_by_existing_route_id'] = {dmr.ID:esrc.get_dmr_utilization(dmr) for esrc in esrcs for dmr in esrc.get_routes()}

    # get relevant activity windows based on all_windows_dict (from gsn) and time horizons (calculated above)
    relevant_activity_windows = get_relevant_activity_windows(all_windows_dict,gp_instance_params['planning_params'])

    gp_inputs = {
        "orbit_prop_inputs": self.orbit_prop_params,
        "orbit_link_inputs": self.orbit_link_params,
        "gp_general_params_inputs": self.gp_general_params,
        "gp_instance_params_inputs": gp_instance_params,
        "data_rates_inputs": self.data_rates_params,
        "existing_route_data": existing_route_data,
        "relevant_activity_windows": relevant_activity_windows,
        "rs_s1_pickle": None,
        "rs_s2_pickle": None,
        "as_pickle": None,
        "file_params": {'new_pickle_file_name_pre': "const_sim_test_pickle"}
    }

    if self.output_path is not None:
        # save off a json with these gp params so we can run this instance again (for debug)
        with open(self.output_path+'pickles/most_recent_gp_instance_params.json','w') as f:
            json.dump(gp_instance_params, f, indent=4, separators=(',', ': '))
        # save off a pickle with these gp params so we can run this instance again (for debug)
        with open(self.output_path+'pickles/most_recent_gp_existing_routes_input.pkl','wb') as f:
            pickle.dump( existing_route_data,f)

    ##############################
    # run the GP

    # Note: the GP is the only place in the whole sim, currently, where scheduled data volume
    # attributes for data routes and windows are allowed to be updated

    # do some funny business to get access to the global planner code
    # path to runner_gp (the import below only works after these sys.path appends)
    if sys.platform == 'win32':
        # todo: this is probably not the right way to support windows users...
        sys.path.append (os.path.join(self.gp_wrapper_params['gp_path'].replace('/','\\'),'python_runner'))
    else:
        sys.path.append (os.path.join(self.gp_wrapper_params['gp_path'],'python_runner'))
    # path to gp_tools
    sys.path.append (self.gp_wrapper_params['gp_path'])
    from runner_gp import PipelineRunner as GPPipelineRunner

    print('==Run GP==')
    print('note: running with local circinus_tools, not circinus_tools within GP repo')
    # NOTE: moved this from within GP pipeline runner, so we can still see this, but rest is
    # suppressed if verbose_milp = false
    print('planning_start_dt: %s'%(tt.iso_string_to_dt(gp_instance_params['planning_params']['planning_start'])))
    print('planning_end_obs_dt: %s'%(tt.iso_string_to_dt(gp_instance_params['planning_params']['planning_end_obs'])))
    print('planning_end_xlnk_dt: %s'%(tt.iso_string_to_dt(gp_instance_params['planning_params']['planning_end_xlnk'])))
    print('planning_end_dlnk_dt: %s'%(tt.iso_string_to_dt(gp_instance_params['planning_params']['planning_end_dlnk'])))
    gp_pr = GPPipelineRunner()
    gp_output = gp_pr.run(gp_inputs,verbose=self.verbose_gp)
    print('==GP DONE==')

    ##############################
    # handle output

    # fix: was "if not gp_output['version'] == EXPECTED_GP_OUTPUT_VER" - same behavior, clearer
    if gp_output['version'] != EXPECTED_GP_OUTPUT_VER:
        raise RuntimeWarning("Saw gp output version %s, expected %s"%(gp_output['version'],EXPECTED_GP_OUTPUT_VER))

    scheduled_routes = gp_output['scheduled_routes']
    all_updated_routes = gp_output['all_updated_routes']
    latest_gp_route_indx = gp_output['latest_dr_uid']

    # sets for O(1) membership tests below
    scheduled_routes_set = set(scheduled_routes)
    existing_routes_set = set(existing_routes)

    sim_routes = []
    for dmr in all_updated_routes:
        # we only want to consider this route if it was actually scheduled (delivers real data
        # volume) or is an existing route. any new routes that were constructed in the global
        # planner that don't get scheduled we can ignore (there's no point in keeping track of them
        # because they're useless - and we haven't yet told any of the satellites about them so we
        # can discard them now). this is in contrast to existing routes which, even if they get
        # unscheduled by the global planner, we have to keep track of because we could've already
        # told the satellites about them after a previous global planning session (so now we need
        # to tell them that decisions have changed)
        if not (dmr in scheduled_routes_set or dmr in existing_routes_set):
            continue

        dmr_dv_util = dmr.get_sched_utilization()

        # check if this sim route container already existed (and the data multi route already
        # existed), and if so, grab the original creation time as well as determine if we have
        # actually updated the simroutecontainer
        # Leave these times as None if (newly created, not updated) - in this case we'll update the
        # times when we release the plans
        old_esrc = esrcs_by_id.get(dmr.ID,None)
        creation_dt = old_esrc.creation_dt if old_esrc else None
        update_dt = old_esrc.update_dt if (old_esrc and not old_esrc.updated_check(dmr,dmr_dv_util)) else None

        # we make an entirely new Sim route container for the route because that way we have a
        # unique, new object, and we don't risk information sharing by inadvertantly updating the
        # same object across satellites and ground network
        # note only one Sim route container per DMR
        # honestly we probably could just use a copy() here...
        sim_routes.append(
            SimRouteContainer(dmr.ID,dmr,dmr_dv_util,creation_dt,update_dt,gp_agent_ID)
        )

    # summary diagnostics: how much of the pre-existing plan survived this GP run.
    # fix: use existing_routes_set for membership (was O(n) list scans per route, accidental O(n^2))
    num_existing_routes_scheduled = len([dmr for dmr in scheduled_routes if dmr in existing_routes_set])
    existing_routes_scheduled_utilization = sum([dmr.get_sched_utilization() for dmr in scheduled_routes if dmr in existing_routes_set])
    existing_routes_utilization = sum(existing_route_data['utilization_by_existing_route_id'].values())
    print('fraction of existing routes kept in schedule: %d / %d '%(num_existing_routes_scheduled,len(existing_routes)))
    print('fraction of existing utilization kept in schedule: %f / %f '%(existing_routes_scheduled_utilization,existing_routes_utilization))
    print('to ease those copypasta blues:')
    print('%d %d '%(len(existing_routes),num_existing_routes_scheduled))
    print('%f %f '%(existing_routes_utilization,existing_routes_scheduled_utilization))

    self.first_iter = False

    return sim_routes, latest_gp_route_indx