def make_map(dset):
    """Plots station positions on a global map."""
    web_map = config.where.get(
        "web_map",
        section=pipeline,
        value=util.read_option_value("--web_map", default=None),  # TODO: add this to mg_config
        default=False,
    ).bool
    if web_map:
        import webbrowser  # Local import (only needed if this function is called)

        map_path = config.files.path("output_web_map", file_vars={**dset.vars, **dset.analysis})
        if not map_path.exists():
            writers.write_one("web_map", dset=dset)
        webbrowser.open(map_path.as_uri())

    plot_map = config.where.get(
        "plot_map",
        section=pipeline,
        value=util.read_option_value("--plot_map", default=None),  # TODO: add this to mg_config
        default=False,
    ).bool
    if plot_map:
        _matplotlib_map(dset)
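# The option lookup in make_map() follows a precedence chain: an explicit command-line
# flag (--web_map / --plot_map) overrides the pipeline section of the configuration,
# which in turn overrides the hard-coded default, before the result is coerced to bool.
# A minimal standalone sketch of that precedence logic in plain Python -- the helper
# below is hypothetical and only illustrates the pattern, it is not part of Where:
def _resolve_bool_option(cli_value=None, config_value=None, default=False):
    """Return the first explicitly set value, mimicking config.where.get(...).bool."""
    for candidate in (cli_value, config_value):
        if candidate is not None:
            return str(candidate).strip().lower() in ("true", "1", "yes", "on")
    return default


# Example: no CLI flag given, configuration sets web_map = true
# _resolve_bool_option(cli_value=None, config_value="true")  -> True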
def calculate(rundate, session, prev_stage, stage):
    """Estimate model parameters

    Args:
        rundate (Datetime):   The model run date.
        session (String):     Name of session.
        prev_stage (String):  Name of previous stage.
        stage (String):       Name of current stage.
    """
    dset = data.Dataset(rundate, tech=TECH, stage=prev_stage, dataset_name=session, dataset_id="last")
    dset.delete_from_file(stage=stage, dataset_id="all")

    # Run models adjusting station positions
    log.info("Calculating station displacements for {}", session)
    models.calculate_site("pos_models", dset, shape=(6,))
    delta_pos = np.sum(dset.get_table("pos_models").reshape((dset.num_obs, -1, 6)), axis=1)

    # Station 1 of each baseline: apply the GCRS displacement and the velocity change it induces
    # (time derivative of the ITRS-to-GCRS rotation applied to the ITRS displacement)
    gcrs_dpos_1 = delta_pos[:, :3]
    gcrs_dvel_1 = (dset.time.itrs2gcrs_dot @ dset.site_pos_1.convert_gcrs_to_itrs(gcrs_dpos_1)[:, :, None])[:, :, 0]
    dset.site_pos_1.add_to_gcrs(np.concatenate((gcrs_dpos_1, gcrs_dvel_1), axis=1))

    # Station 2 of each baseline: same update with the last three columns of the model table
    gcrs_dpos_2 = delta_pos[:, 3:]
    gcrs_dvel_2 = (dset.time.itrs2gcrs_dot @ dset.site_pos_2.convert_gcrs_to_itrs(gcrs_dpos_2)[:, :, None])[:, :, 0]
    dset.site_pos_2.add_to_gcrs(np.concatenate((gcrs_dpos_2, gcrs_dvel_2), axis=1))
    log.blank()

    # Run models for each term of the observation equation
    log.info("Calculating theoretical delays for {}", session)
    models.calculate_delay("calc_models", dset)
    dset.add_float("obs", val=dset.observed_delay, unit="meter", write_level="operational")
    dset.add_float("calc", val=np.sum(dset.get_table("calc_models"), axis=1), unit="meter", write_level="operational")
    dset.add_float("residual", val=dset.obs - dset.calc, unit="meter", write_level="operational")
    log.blank()

    # Estimate clock polynomial
    log.info("Calculating clock polynomials for {}", session)
    max_iterations = config.tech.calculate_max_iterations.int
    outlier_limit = config.tech.calculate_outlier_limit.float
    store_outliers = config.tech.store_outliers.bool

    for iter_num in itertools.count(start=1):
        models.calculate_delay("correction_models", dset, dset)
        dset.calc[:] = np.sum(np.hstack((dset.get_table("calc_models"), dset.get_table("correction_models"))), axis=1)
        dset.residual[:] = dset.obs - dset.calc
        rms = dset.rms("residual")
        log.info("{}: {} observations, residual = {:.4f}", session, dset.num_obs, rms)

        # Store results
        dset.write_as(stage=stage, dataset_id=iter_num - 1)

        # Detect and remove extreme outliers
        idx = np.abs(dset.residual) < outlier_limit * rms
        if iter_num > max_iterations or idx.all():
            break

        if store_outliers:
            bad_idx = np.logical_not(idx)
            log.info(f"Adding {np.sum(bad_idx)} observations to ignore_observation")
            bad_obs = np.char.add(np.char.add(dset.time.utc.iso[bad_idx], " "), dset.baseline[bad_idx]).tolist()
            with config.update_tech_config(rundate, TECH, session) as cfg:
                current = cfg.ignore_observation.observations.as_list(", *")
                updated = ", ".join(sorted(current + bad_obs))
                cfg.update("ignore_observation", "observations", updated, source=util.get_program_name())

        dset.subset(idx)
        log.info("Removing {} observations with residuals bigger than {:.4f}", sum(np.logical_not(idx)), outlier_limit * rms)
        log.blank()

    # Try to detect clock breaks
    if config.tech.detect_clockbreaks.bool:
        writers.write_one("vlbi_detect_clockbreaks", dset)

    dset.write()
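# The clock-polynomial loop above screens outliers iteratively: after each pass the
# residual RMS is recomputed and observations with |residual| >= outlier_limit * rms
# are removed, until either no new outliers are found or max_iterations is exceeded.
# A self-contained numpy sketch of that screening step (the function and its names are
# hypothetical, for illustration only -- in calculate() the residuals are additionally
# recomputed after each clock fit):
import numpy as np


def _screen_outliers(residuals, outlier_limit=3.0, max_iterations=10):
    """Return a boolean mask of observations kept after iterative RMS screening."""
    keep = np.ones(residuals.size, dtype=bool)
    for _ in range(max_iterations):
        rms = np.sqrt(np.mean(residuals[keep] ** 2))
        good = np.abs(residuals) < outlier_limit * rms
        good &= keep  # never resurrect observations rejected in an earlier pass
        if np.array_equal(good, keep):
            break  # converged: no new outliers detected
        keep = good
    return keep


# Example: one gross outlier among many small residuals is rejected on the first pass
# _screen_outliers(np.r_[np.full(20, 0.01), 5.0]).sum()  -> 20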
def calculate(stage, dset):
    """Estimate model parameters

    Args:
        stage (String):   Name of current stage.
        dset (Dataset):   A dataset containing the data.
    """
    # Run models adjusting station positions
    log.info("Calculating station displacements")
    site.calculate_site("site", dset)
    delta_pos = site.add("site", dset)
    dset.site_pos_1[:] = (dset.site_pos_1.gcrs + delta_pos[0].gcrs).trs
    dset.site_pos_2[:] = (dset.site_pos_2.gcrs + delta_pos[1].gcrs).trs
    log.blank()

    # Run models for each term of the observation equation
    log.info("Calculating theoretical delays")
    delay.calculate_delay("delay", dset)
    delta_delay = delay.add("delay", dset)
    dset.add_float("obs", val=dset.observed_delay, unit="meter", write_level="operational")
    dset.add_float("calc", val=delta_delay, unit="meter", write_level="operational")
    dset.add_float("residual", val=dset.obs - dset.calc, unit="meter", write_level="operational")
    log.blank()

    # Estimate clock polynomial
    log.info("Calculating clock polynomials")
    max_iterations = config.tech.calculate_max_iterations.int
    outlier_limit = config.tech.calculate_outlier_limit.float
    store_outliers = config.tech.store_outliers.bool

    for iter_num in itertools.count(start=1):
        delay.calculate_delay("delay_corr", dset, dset)
        delta_correction = delay.add("delay_corr", dset)
        dset.calc[:] = dset.calc + delta_correction
        dset.residual[:] = dset.obs - dset.calc
        rms = dset.rms("residual")
        log.info(f"{dset.num_obs} observations, residual = {rms:.4f}")

        # Store results
        dset.write_as(stage=stage, label=iter_num - 1)

        # Detect and remove extreme outliers
        idx = np.abs(dset.residual) < outlier_limit * rms
        if iter_num > max_iterations or idx.all():
            break

        if store_outliers:
            bad_idx = np.logical_not(idx)
            log.info(f"Adding {np.sum(bad_idx)} observations to ignore_observation")
            bad_obs = np.char.add(np.char.add(dset.time.utc.iso[bad_idx], " "), dset.baseline[bad_idx]).tolist()
            with config.update_tech_config(dset.analysis["rundate"], pipeline, session=dset.vars["session"]) as cfg:
                current = cfg.ignore_observation.observations.as_list(", *")
                updated = ", ".join(sorted(current + bad_obs))
                cfg.update("ignore_observation", "observations", updated, source=util.get_program_name())

        dset.subset(idx)
        log.info(f"Removing {sum(~idx)} observations with residuals bigger than {outlier_limit * rms:.4f}")
        log.blank()

    # Try to detect clock breaks
    if config.tech.detect_clockbreaks.bool:
        writers.write_one("vlbi_detect_clockbreaks", dset=dset)

    dset.write()
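# Compared to the older calculate() above, the frame bookkeeping (the explicit
# itrs2gcrs_dot velocity terms) is now handled by the position objects themselves:
# the modelled displacement is added in GCRS and the result is read back in the
# terrestrial frame via .trs.  A bare-numpy sketch of that update for a single
# station and epoch, with a plain rotation matrix standing in for the dataset's
# position objects (all names are hypothetical, for illustration only):
import numpy as np


def _apply_displacement_trs(pos_trs, delta_gcrs, trs2gcrs):
    """Add a GCRS displacement to a TRS position and return the updated TRS position."""
    pos_gcrs = trs2gcrs @ pos_trs      # rotate the station position into GCRS
    pos_gcrs = pos_gcrs + delta_gcrs   # apply the modelled displacement
    return trs2gcrs.T @ pos_gcrs       # rotate back; for rotation matrices inverse == transpose


# Example with an identity rotation (frames coincide):
# _apply_displacement_trs(np.array([1.0, 2.0, 3.0]), np.array([0.01, 0.0, 0.0]), np.eye(3))
# -> array([1.01, 2.  , 3.  ])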