def estimate(stage, dset):
    """Estimate model parameters, iterating with outlier removal.

    Runs the configured estimation method on the dataset and writes the
    result of each pass. Between passes the configured outlier detectors
    are applied; iteration stops when no outliers remain or when the
    configured maximum number of iterations is reached.

    Args:
        stage (String):  Name of current stage.
        dset (Dataset):  A dataset containing the data.
    """
    partial_vectors = estimation.partial_vectors(dset, "estimate_method")
    max_iterations = config.tech.estimate_max_iterations.int

    iter_num = 0
    while True:
        iter_num += 1
        log.info(f"Estimating parameters for iteration {iter_num}")

        # Observation noise: squared formal error of the observed delay plus a
        # small constant floor (0.01**2)
        noise = dset.observed_delay_ferr**2 + 0.01**2
        estimation.call("estimate_method", dset=dset, partial_vectors=partial_vectors, obs_noise=noise)

        rms = dset.rms("residual")
        log.info(f"{dset.num_obs} observations, postfit residual = {rms:.4f}")
        dset.write_as(stage=stage, label=iter_num - 1)

        if iter_num >= max_iterations:
            break

        # Apply outlier detectors; stop as soon as every observation is kept
        inliers = estimation.apply_outlier_detectors("estimate_outlier_detection", dset)
        if inliers.all():
            break
        dset.subset(inliers)
        log.blank()
def estimate(stage, dset):
    """Estimate model parameters and solve the normal equations.

    Runs the configured estimation method once (the iteration loop below is a
    placeholder — see the TODO notes), writes the result, then solves the
    normal equations and writes the dataset.

    Args:
        stage (String):  Name of current stage.
        dset (Dataset):  A dataset containing the data.
    """
    partial_vectors = estimation.partial_vectors(dset, "estimate_method")
    max_iterations = config.tech.estimate_max_iterations.int
    for iter_num in itertools.count(start=1):
        log.info(f"Estimating parameters for iteration {iter_num}")
        estimation.call(
            "estimate_method",
            dset=dset,
            partial_vectors=partial_vectors,
            obs_noise=None  # TODO: Add something here
        )
        rms = dset.rms("residual")
        log.info(f"{dset.num_obs} observations, postfit residual = {rms:.4f}")
        dset.write_as(stage=stage, label=iter_num - 1, sat_name=dset.vars["sat_name"])
        # TODO:
        # Do some iteration with removal of outliers?
        # NOTE(review): the unconditional break below means the loop always runs
        # exactly once, so the max_iterations check after it is unreachable dead
        # code — kept as-is pending the outlier-removal TODO above.
        break
        if iter_num >= max_iterations:
            break
    estimation.solve_neq(dset)
    dset.write()
def estimate(stage, dset):
    """Estimate model parameters with a Kalman filter, rejecting outliers.

    Each iteration recomputes the partial vectors and observation noise, runs
    the configured estimation method, and writes the result. The configured
    observation rejectors are then applied; iteration stops when they leave
    the dataset unchanged, remove every observation, or the maximum number of
    iterations is reached. Finally the normal equations are solved, provided
    any observations remain.

    Args:
        stage (String):  Name of current stage.
        dset (Dataset):  A dataset containing the data.
    """
    max_iterations = config.tech.estimate_max_iterations.int
    delay_unit = "meter"

    for iter_num in itertools.count(start=1):
        partial_vectors = estimation.partial_vectors(dset, "estimate_method")

        # Squared formal errors of the observed delay and ionosphere delay
        # (NaNs treated as zero) plus a small constant floor (0.01**2)
        iono_term = np.nan_to_num(dset.iono_delay_ferr) ** 2
        obs_noise = dset.observed_delay_ferr ** 2 + iono_term + 0.01 ** 2

        log.info(
            f"Estimating parameters for iteration {iter_num} using Kalman Filter and continuous piecewise linear functions"
        )
        estimation.call("estimate_method", dset=dset, partial_vectors=partial_vectors, obs_noise=obs_noise)

        rms = dset.rms("residual")
        log.info(f"{dset.num_obs} observations, rms of postfit residuals = {rms:.4f} {delay_unit}")
        dset.write_as(stage=stage, label=iter_num - 1)

        if iter_num >= max_iterations:
            break

        # Apply observation rejectors (possibly independently of each other)
        obs_count_before = dset.num_obs
        reject_independently = config.tech.estimate_obs_rejectors_independent.bool
        dset = estimation.apply_observation_rejectors("estimate_obs_rejectors", dset, reject_independently)
        log.blank()

        # Converged (nothing removed) or nothing left to estimate on
        if dset.num_obs in (obs_count_before, 0):
            break

    log.blank()
    if dset.num_obs > 0:
        estimation.solve_neq(dset)
        dset.write()
def estimate(rundate, session, prev_stage, stage):
    """Estimate model parameters for a session, removing residual outliers.

    Loads the dataset written by the previous stage, then repeatedly runs the
    configured estimation method, writing the result of each pass.
    Observations whose absolute residual exceeds ``outlier_limit * rms`` are
    dropped between passes; iteration stops when no outliers remain or the
    configured maximum number of iterations is reached.

    Args:
        rundate (Datetime):   The model run date.
        session (String):     Name of session.
        prev_stage (String):  Name of previous stage.
        stage (String):       Name of current stage.
    """
    dset = data.Dataset(rundate, tech=TECH, stage=prev_stage, dataset_name=session, dataset_id="last")
    dset.delete_from_file(stage=stage, dataset_id="all")

    partial_vectors = estimation.partial_vectors(dset, "estimate_method")
    max_iterations = config.tech.estimate_max_iterations.int
    outlier_limit = config.tech.estimate_outlier_limit.float

    for iter_num in itertools.count(start=1):
        log.info("Estimating parameters for {} (iteration {})", session, iter_num)

        # Observation noise: squared formal error of the observed delay plus a
        # small constant floor (0.01**2)
        noise = dset.observed_delay_ferr ** 2 + 0.01 ** 2
        estimation.call("estimate_method", dset=dset, partial_vectors=partial_vectors, obs_noise=noise)

        rms = dset.rms("residual")
        log.info("{}: {} observations, postfit residual = {:.4f}", session, dset.num_obs, rms)
        dset.write_as(stage=stage, dataset_id=iter_num - 1)

        # Observations within the outlier limit are kept for the next pass
        keep = np.abs(dset.residual) < outlier_limit * rms
        if iter_num >= max_iterations or keep.all():
            break

        dset.subset(keep)
        log.info(
            "Removing {} observations with residuals bigger than {:.4f}",
            sum(np.logical_not(keep)),
            outlier_limit * rms,
        )
        log.blank()
def estimate(stage, dset):
    """Estimate model parameters, iterating with observation rejection.

    Each iteration runs the configured estimation method and writes the
    result. The configured observation rejectors are then applied; iteration
    stops when they leave the dataset unchanged, remove every observation, or
    the maximum number of iterations is reached. Finally the normal equations
    are solved, provided any observations remain.

    Args:
        stage (String):  Name of current stage.
        dset (Dataset):  A dataset containing the data.
    """
    max_iterations = config.tech.estimate_max_iterations.int
    for iter_num in itertools.count(start=1):
        partial_vectors = estimation.partial_vectors(dset, "estimate_method")
        log.info(f"Estimating parameters for iteration {iter_num}")
        estimation.call(
            "estimate_method",
            dset=dset,
            partial_vectors=partial_vectors,
            # Squared formal error of the observed delay plus a small
            # constant floor (0.01**2)
            obs_noise=dset.observed_delay_ferr ** 2 + 0.01 ** 2,
        )
        rms = dset.rms("residual")
        log.info(f"{dset.num_obs} observations, postfit residual = {rms:.4f}")
        dset.write_as(stage=stage, label=iter_num - 1)
        if iter_num >= max_iterations:
            break
        # Detect and remove outliers
        num_obs_before = dset.num_obs
        independent = config.tech.estimate_obs_rejectors_independent.bool
        dset = estimation.apply_observation_rejectors("estimate_obs_rejectors", dset, independent)
        log.blank()
        # Stop when the rejectors left the dataset unchanged (converged) or
        # removed every observation — previously an emptied dataset kept
        # iterating and was passed on to solve_neq
        if dset.num_obs == num_obs_before or dset.num_obs == 0:
            break
    # Only solve the normal equations when observations remain
    if dset.num_obs > 0:
        estimation.solve_neq(dset)
        dset.write()
def calculate_estimate(stage, dset):
    """Calculate model parameters and estimate

    Alternates between a CALCULATE step (apply site and delay models, update
    observations/corrections/residuals) and an ESTIMATE step (run the
    configured estimation method), iterating with outlier removal until the
    estimate converges with no outliers or the configured maximum number of
    iterations is reached.

    Args:
        stage (str):      Name of current stage.
        dset (Dataset):   A dataset containing the data.
    """
    max_iterations = config.tech.max_iterations.int

    for iter_num in itertools.count(start=1):
        # CALCULATE
        # -----------
        # Correction of station position in GCRS due to loading and tide effects
        site.calculate_site("site", dset, shape=(3, ))
        # Sum the per-model (num_obs, num_models, 3) site corrections over
        # models to get one 3-vector correction per observation
        delta_pos = np.sum(dset.get_table("site").reshape(
            (dset.num_obs, -1, 3)), axis=1)
        dset.site_pos.add_to_gcrs(delta_pos)

        # Initialize models given in configuration file by adding model fields to Dataset
        delay.calculate_delay("calc_models", dset, write_levels=dict(gnss_range="operational"))

        # Update the code observation in place, or add the field on the first pass
        if "obs" in dset.fields:
            dset.obs[:] = gnss.get_code_observation(dset)
        else:
            dset.add_float("obs", val=gnss.get_code_observation(dset), unit="meter")

        # Get model corrections: sum of all delay model contributions per observation
        if "calc" in dset.fields:
            dset.calc[:] = np.sum(dset.get_table("calc_models"), axis=1)
        else:
            dset.add_float("calc", val=np.sum(dset.get_table("calc_models"), axis=1), unit="meter")

        # Prefit residual = observed minus calculated
        if "residual" in dset.fields:
            dset.residual[:] = dset.obs - dset.calc
        else:
            dset.add_float("residual", val=dset.obs - dset.calc, unit="meter")

        # Store calculate results
        log.info(
            f"{dset.num_obs} observations, residual = {dset.rms('residual'):.4f}"
        )
        dset.write_as(stage="calculate", dataset_id=iter_num)
        dset.read()  # TODO: workaround because caching does not work correctly

        # ESTIMATE
        # ----------
        partial_vectors = estimation.partial_vectors(dset, "estimate_method")
        log.blank()  # Space between iterations for clarity
        log.info(f"Estimating parameters for iteration {iter_num}")
        # Unit observation noise: all observations weighted equally
        estimation.call("estimate_method",
                        dset=dset,
                        partial_vectors=partial_vectors,
                        obs_noise=np.ones(dset.num_obs))
        rms = dset.rms("residual")
        log.info(f"{dset.num_obs} observations, postfit residual = {rms:.4f}")
        dset.write_as(stage="estimate", dataset_id=iter_num - 1)
        dset.read()  # TODO: workaround because caching does not work correctly

        # Detect and remove outliers based on residuals
        keep_idx = estimation.detect_outliers("estimate_outlier_detection", dset)

        # Done when the estimate has converged and no observation is flagged
        if dset.meta["estimate_convergence_status"] and keep_idx.all():
            log.info(
                f"Estimation convergence limit of {config.tech.convergence_limit.float:.3e} is fulfilled."
            )
            break
        if iter_num >= max_iterations:
            break

        # Drop flagged observations and iterate again
        dset.subset(keep_idx)
        log.blank()