def __init__(self, model_name="b0fee2e6_20190921-12:56:55",
             epoch=15, results_mapper_file="mapper.txt",
             inp_au=240, inp_al=-240, inp_symh=-10,
             inp_asymh=15, inp_f107=115, use_manual_bz=False,
             use_manual_by=False, use_manual_bx=False,
             use_manual_vx=False, use_manual_np=False,
             use_manual_month=False, manual_bz_val=None,
             manual_by_val=None, manual_bx_val=None,
             manual_vx_val=None, manual_np_val=None,
             manual_mnth_val=None):
    """
    Load the real-time SW/IMF data, normalize the model inputs with the
    mean/std values used during training, and load the trained model.

    Parameters
    ----------
    model_name : trained-model identifier used to look up its parameters
        in ``results_mapper_file`` and to load its weights.
    epoch : training epoch whose weights are loaded.
    results_mapper_file : json file mapping model names to parameters.
    inp_au, inp_al, inp_symh, inp_asymh, inp_f107 : fixed geomagnetic
        index inputs (un-normalized); normalized here with training stats.
    use_manual_* : switches; when True the matching ``manual_*_val``
        replaces the real-time value for that feature.
    manual_*_val : manual override values.
        NOTE(review): assumed to be in the same physical units as the
        real-time feed — confirm with callers.
    """
    import pathlib
    # Load the RT SW/IMF data
    data_obj = dwnld_sw_imf_rt.DwnldRTSW()
    url_data = data_obj.dwnld_file()
    if url_data is not None:
        data_obj.read_url_data(url_data)
    self.sw_imf_df = data_obj.read_stored_data()
    # keep an untouched copy for plotting/inspection
    self.original_sw_imf_data = deepcopy(self.sw_imf_df)
    # we need a 1-minute interval data!
    self.sw_imf_df.set_index('propagated_time_tag', inplace=True)
    self.sw_imf_df = self.sw_imf_df.resample('1min').median()
    # linearly interpolate data
    self.sw_imf_df.interpolate(method='linear', axis=0, inplace=True)
    # Now we need to normalize the input based on train data:
    # load mean and std values of input features from a json file.
    inp_par_file_name = "input_mean_std.json"
    file_path = pathlib.Path(inp_par_file_name)
    if file_path.exists():
        inp_mean_std_file_path = file_path.as_posix()
    else:
        inp_mean_std_file_path = pathlib.Path.cwd().joinpath(
            "amp_model", inp_par_file_name).as_posix()
    with open(inp_mean_std_file_path) as jf:
        params_mean_std_dct = json.load(jf)
    # NOTE: when training the model Vx was negative, but the real-time
    # feed reports a positive speed — so flip the sign.
    # Record whether any manual input was used. The month switch is
    # deliberately excluded so plots can be compared across seasons
    # for the same IMF/solar-wind data.
    self.used_manual_inputs = False
    if use_manual_vx:
        self.sw_imf_df["Vx"] = manual_vx_val
        self.used_manual_inputs = True
    else:
        self.sw_imf_df["Vx"] = -1. * self.sw_imf_df["speed"]
    # Normalize
    self.sw_imf_df["Vx"] = (self.sw_imf_df["Vx"] -
                            params_mean_std_dct["Vx_mean"]) / \
        params_mean_std_dct["Vx_std"]
    if use_manual_np:
        self.sw_imf_df["Np"] = (manual_np_val -
                                params_mean_std_dct["Np_mean"]) / \
            params_mean_std_dct["Np_std"]
        self.used_manual_inputs = True
    else:
        self.sw_imf_df["Np"] = (self.sw_imf_df["density"] -
                                params_mean_std_dct["Np_mean"]) / \
            params_mean_std_dct["Np_std"]
    if use_manual_bz:
        self.sw_imf_df["Bz"] = (manual_bz_val -
                                params_mean_std_dct["Bz_mean"]) / \
            params_mean_std_dct["Bz_std"]
        self.used_manual_inputs = True
    else:
        self.sw_imf_df["Bz"] = (self.sw_imf_df["bz"] -
                                params_mean_std_dct["Bz_mean"]) / \
            params_mean_std_dct["Bz_std"]
    if use_manual_by:
        # BUGFIX: manual By used to be normalized with the Bz mean/std;
        # use the By statistics, matching the non-manual branch.
        self.sw_imf_df["By"] = (manual_by_val -
                                params_mean_std_dct["By_mean"]) / \
            params_mean_std_dct["By_std"]
        self.used_manual_inputs = True
    else:
        self.sw_imf_df["By"] = (self.sw_imf_df["by"] -
                                params_mean_std_dct["By_mean"]) / \
            params_mean_std_dct["By_std"]
    if use_manual_bx:
        # BUGFIX: manual Bx used to be normalized with the Bz mean/std;
        # use the Bx statistics, matching the non-manual branch.
        self.sw_imf_df["Bx"] = (manual_bx_val -
                                params_mean_std_dct["Bx_mean"]) / \
            params_mean_std_dct["Bx_std"]
        self.used_manual_inputs = True
    else:
        self.sw_imf_df["Bx"] = (self.sw_imf_df["bx"] -
                                params_mean_std_dct["Bx_mean"]) / \
            params_mean_std_dct["Bx_std"]
    # month-of-year encoded cyclically as sine/cosine
    if use_manual_month:
        self.sw_imf_df["month_sine"] = numpy.sin(
            2 * numpy.pi / 12 * manual_mnth_val)
        self.sw_imf_df["month_cosine"] = numpy.cos(
            2 * numpy.pi / 12 * manual_mnth_val)
    else:
        self.sw_imf_df["month_sine"] = numpy.sin(
            2 * numpy.pi / 12 * self.sw_imf_df.index.month)
        self.sw_imf_df["month_cosine"] = numpy.cos(
            2 * numpy.pi / 12 * self.sw_imf_df.index.month)
    # fixed geomagnetic-index inputs, normalized with training stats
    self.sw_imf_df["au"] = (inp_au - params_mean_std_dct["au_mean"]) / \
        params_mean_std_dct["au_std"]
    self.sw_imf_df["al"] = (inp_al - params_mean_std_dct["al_mean"]) / \
        params_mean_std_dct["al_std"]
    self.sw_imf_df["symh"] = (inp_symh - params_mean_std_dct["symh_mean"]) / \
        params_mean_std_dct["symh_std"]
    self.sw_imf_df["asymh"] = (inp_asymh - params_mean_std_dct["asymh_mean"]) / \
        params_mean_std_dct["asymh_std"]
    self.sw_imf_df["F107"] = (inp_f107 - params_mean_std_dct["F107_mean"]) / \
        params_mean_std_dct["F107_std"]
    # Load the params for the chosen model from the mapper file
    res_map_file_path = pathlib.Path(results_mapper_file)
    if res_map_file_path.exists():
        res_map_file_path = res_map_file_path.as_posix()
    else:
        res_map_file_path = pathlib.Path.cwd().joinpath(
            "amp_model", results_mapper_file).as_posix()
    with open(res_map_file_path) as jf:
        self.params_dict = json.load(jf)
    self.params_dict = self.params_dict["../data/trained_models/" +
                                        model_name]
    # load the reference numpy array to reconstruct mlat/mlt locs of jr
    # (call the loader once instead of twice)
    ref_amp_dfs = self._load_sample_csv_file()
    self.ref_amp_df_nth = ref_amp_dfs[0]
    self.ref_amp_df_sth = ref_amp_dfs[1]
    # Load the model
    self.model = self.load_model(model_name, epoch)
def __init__(self, model_name="model_anlyz", epoch=200, omn_pred_hist=120):
    """
    Fetch the real-time SW/IMF data, normalize the model inputs with
    the training mean/std values and load the trained model.
    """
    import pathlib
    # Fetch the latest real-time solar-wind/IMF data.
    downloader = dwnld_sw_imf_rt.DwnldRTSW()
    fetched = downloader.dwnld_file()
    if fetched is not None:
        downloader.read_url_data(fetched)
    self.sw_imf_df = downloader.read_stored_data()
    self.original_sw_imf_data = deepcopy(self.sw_imf_df)
    # Resample the untouched copy to a 1-minute median series; it is
    # kept for plotting, so rename its columns to the display names.
    self.original_sw_imf_data.set_index('propagated_time_tag', inplace=True)
    self.original_sw_imf_data = self.original_sw_imf_data.resample(
        '1min').median()
    self.original_sw_imf_data["Vx"] = \
        -1. * self.original_sw_imf_data["speed"]
    self.original_sw_imf_data.rename(
        columns={"density": "Np", "bx": "Bx", "by": "By", "bz": "Bz"},
        inplace=True)
    # The working frame also needs a 1-minute cadence, with gaps
    # filled by linear interpolation.
    self.sw_imf_df.set_index('propagated_time_tag', inplace=True)
    self.sw_imf_df = self.sw_imf_df.resample('1min').median()
    self.sw_imf_df.interpolate(method='linear', axis=0, inplace=True)
    # Normalization constants (per-feature mean/std from training)
    # live in a json file, either beside us or under amp_model/.
    inp_par_file_name = "input_mean_std.json"
    local_candidate = pathlib.Path(inp_par_file_name)
    if local_candidate.exists():
        inp_mean_std_file_path = local_candidate.as_posix()
    else:
        inp_mean_std_file_path = pathlib.Path.cwd().joinpath(
            "amp_model", inp_par_file_name).as_posix()
    with open(inp_mean_std_file_path) as jf:
        params_mean_std_dct = json.load(jf)
    # Vx was negative in the training data; the feed's speed is positive.
    self.sw_imf_df["Vx"] = -1. * self.sw_imf_df["speed"]
    # Normalize each feature in turn: (value - train_mean) / train_std.
    for feat, src_col in (("Vx", "Vx"), ("Np", "density"),
                          ("Bz", "bz"), ("By", "by"), ("Bx", "bx")):
        self.sw_imf_df[feat] = (
            self.sw_imf_df[src_col] - params_mean_std_dct[feat + "_mean"]
        ) / params_mean_std_dct[feat + "_std"]
    # Load the model
    self.model = self.load_model(model_name, epoch)
    self.omn_pred_hist = omn_pred_hist
def main():
    """Streamlit entry point: let the user pick a tool and route to it."""
    # page title
    st.title('Select a tool')
    # tool selector in the main area
    chosen_tool = st.selectbox(
        '',
        (
            'Daily geoactivity tool',
            'GPS TEC tool',
            'AMPERE FACs forecast',
            'Substorm onset forecast'
        )
    )
    # We need a session state to switch between dates (the prev-day /
    # next-day buttons). Defaults for the geo_activity_tool page:
    geo_all_param_list = [
        "DSCOVER", "OMNI", "STORM",
        "SUBSTORM", "SUPERDARN"
    ]
    nhours_plot_default = 0
    ndays_plot_default = 1
    inp_start_date = datetime.date(2018, 1, 2)
    inp_start_time = datetime.time(0, 0)
    # Defaults for the sson_model page: repeat the preprocessing the
    # sson_model calc performs to find the latest available omni time.
    rt_downloader = dwnld_sw_imf_rt.DwnldRTSW()
    fetched = rt_downloader.dwnld_file()
    if fetched is not None:
        rt_downloader.read_url_data(fetched)
    sw_imf_df = rt_downloader.read_stored_data()
    sw_imf_df.set_index('propagated_time_tag', inplace=True)
    sw_imf_df = sw_imf_df.resample('1min').median()
    # linearly interpolate data
    sw_imf_df.interpolate(method='linear', axis=0, inplace=True)
    omn_end_time = sw_imf_df.index.max()
    # common session state shared by all pages
    state = session_state.get(
        plot_start_date=inp_start_date,
        plot_start_time=inp_start_time,
        plot_param_list=geo_all_param_list,
        plot_nhours_plot=nhours_plot_default,
        plot_ndays_plot=ndays_plot_default,
        date_sson_hist_plot=omn_end_time
    )
    # route to the selected page
    if chosen_tool == 'Daily geoactivity tool':
        geo_activity_page(
            state,
            local_data_store="./geo_tool/data/sqlite3/",
            plot_style="classic",
            inp_start_date=inp_start_date,
            inp_start_time=inp_start_time,
            all_param_list=geo_all_param_list,
            nhours_plot_default=nhours_plot_default,
            ndays_plot_default=ndays_plot_default
        )
    elif chosen_tool == 'GPS TEC tool':
        gps_tec_page()
    elif chosen_tool == 'AMPERE FACs forecast':
        fac_model_page()
    else:
        ss_onset_page(state)
def __init__(self, model_name="model_paper",
             epoch=200, omn_pred_hist=120,
             local_data_folder="data/"):
    """
    Fetch the real-time SW/IMF data, normalize the model inputs with
    the training mean/std values and record prediction-cache filenames.
    """
    import pathlib
    # Folder caching data/predictions so calculations are not repeated.
    # Prefer the path as given; otherwise resolve it under the parent
    # of the current working directory.
    if pathlib.Path(local_data_folder).exists():
        self.local_data_folder = local_data_folder
    else:
        resolved = pathlib.Path.cwd().parent.joinpath(local_data_folder)
        self.local_data_folder = resolved.as_posix() + "/"
    # filenames for the latest and historical prediction caches
    self.latest_pred_data_file = self.local_data_folder + "latest_preds.npz"
    self.hist_pred_data_file = self.local_data_folder + "hist_preds.npz"
    # Fetch the latest real-time solar-wind/IMF data.
    downloader = dwnld_sw_imf_rt.DwnldRTSW()
    fetched = downloader.dwnld_file()
    if fetched is not None:
        downloader.read_url_data(fetched)
    self.sw_imf_df = downloader.read_stored_data()
    self.original_sw_imf_data = deepcopy(self.sw_imf_df)
    # Resample the working frame to a 1-minute cadence, with gaps
    # filled by linear interpolation.
    self.sw_imf_df.set_index('propagated_time_tag', inplace=True)
    self.sw_imf_df = self.sw_imf_df.resample('1min').median()
    self.sw_imf_df.interpolate(method='linear', axis=0, inplace=True)
    # Normalization constants (per-feature mean/std from training)
    # live in a json file, either beside us or under amp_model/.
    inp_par_file_name = "input_mean_std.json"
    local_candidate = pathlib.Path(inp_par_file_name)
    if local_candidate.exists():
        inp_mean_std_file_path = local_candidate.as_posix()
    else:
        inp_mean_std_file_path = pathlib.Path.cwd().joinpath(
            "amp_model", inp_par_file_name).as_posix()
    with open(inp_mean_std_file_path) as jf:
        params_mean_std_dct = json.load(jf)
    # Vx was negative in the training data; the feed's speed is positive.
    self.sw_imf_df["Vx"] = -1. * self.sw_imf_df["speed"]
    # Normalize each feature in turn: (value - train_mean) / train_std.
    for feat, src_col in (("Vx", "Vx"), ("Np", "density"),
                          ("Bz", "bz"), ("By", "by"), ("Bx", "bx")):
        self.sw_imf_df[feat] = (
            self.sw_imf_df[src_col] - params_mean_std_dct[feat + "_mean"]
        ) / params_mean_std_dct[feat + "_std"]
    # remember model configuration for later loading/prediction
    self.omn_pred_hist = omn_pred_hist
    self.model_name = model_name
    self.epoch = epoch