def get_ephemeris(rundate, sat_name):
    """Get CPF data for a given date

    The ephemeris data is stored in a dictionary with keys site_num (5 digits, e.g. '97401') and
    subdictionary with keys antenna_num (one letter and three digits, e.g. 'S002'). Contains
    information on site position and sigmas.

    Args:
        rundate (Datetime):  Model run date.
        sat_name (String):   Name of satellite.

    Returns:
        Dict: Ephemeris data.
    """
    file_key = "slr_ephemeris"
    sat_data = get_satellite_vars(sat_name)
    provider_list = config.tech.prediction_providers.list

    # Find the latest version of the observation file
    versions = files.glob_variable(file_key, "version", r"\d{4}", file_vars=sat_data)

    # Fix: start from the satellite file variables so the provider glob and the parser below
    # resolve the same file path placeholders as the version glob above (extra vars are harmless).
    ephemeris_data = dict(sat_data)
    try:
        ephemeris_data["version"] = sorted(versions)[-1]
        providers = files.glob_variable(file_key, "provider", r"\w+", file_vars=ephemeris_data)
        for provider in provider_list:
            if provider in providers:
                ephemeris_data["provider"] = provider
                break
        else:
            # No configured provider had a matching file on disk
            log.fatal(f"No valid provider found: {', '.join(providers)}")
    except IndexError:
        # sorted(versions)[-1] raised: no prediction files exist at all for this satellite
        log.info(f"Pattern: '{files.path(file_key)}'")
        log.info("No ephemeris data found")
        log.fatal("Download manually from ftp://cddis.nasa.gov/slr/cpf_predicts/")

    eph_parser = parsers.parse_key(file_key, file_vars=ephemeris_data)
    eph = calculate_initial_values(eph_parser.as_dict(), rundate)

    return eph
def get_ephemeris(rundate, sat_name):
    """Get CPF data for a given date

    The ephemeris data is stored in a dictionary with keys site_num (5 digits, e.g. '97401') and
    subdictionary with keys antenna_num (one letter and three digits, e.g. 'S002'). Contains
    information on site position and sigmas.

    Args:
        rundate (Datetime):  Model run date.
        sat_name (String):   Name of satellite.

    Returns:
        Dict: Ephemeris data.
    """
    file_key = "slr_ephemeris"
    sat_data = get_satellite_vars(sat_name)
    provider_list = config.tech.prediction_providers.list

    # Find the latest version of the observation file
    versions = files.glob_variable(file_key, "version", r"\d{4}", file_vars=sat_data)

    # Fix: start from the satellite file variables so the provider glob and the parser below
    # resolve the same file path placeholders as the version glob above (extra vars are harmless).
    ephemeris_data = dict(sat_data)
    try:
        ephemeris_data["version"] = sorted(versions)[-1]
        providers = files.glob_variable(file_key, "provider", r"\w+", file_vars=ephemeris_data)
        for provider in provider_list:
            if provider in providers:
                ephemeris_data["provider"] = provider
                break
        else:
            # Fix: previously a missing provider fell through silently and failed later with an
            # unrelated KeyError; fail fast here like the sibling get_ephemeris implementation.
            log.fatal(f"No valid provider found: {', '.join(providers)}")
    except IndexError:
        # sorted(versions)[-1] raised: no prediction files exist at all for this satellite
        print(f"Pattern: '{files.path(file_key)}'")  # TODO: Because of format log does not print this properly
        log.fatal("No ephemeris data found")

    eph = parsers.parse_key(file_key, file_vars=ephemeris_data, rundate=rundate)
    eph = calculate_initial_values(eph)

    return eph
def list_sessions(rundate):
    """Sessions available for the given rundate

    Args:
        rundate (date):  The model run date.

    Returns:
        List: Strings with names of available sessions.
    """
    use_master = config.where.get(
        "get_session_from_master",
        section=TECH,
        value=util.read_option_value("--get_session_from_master", default=None),  # TODO: add this to mg_config
        default=False,
    ).bool

    if not use_master:
        # Fall back to globbing the observation file directories for session names
        obs_format = config.tech.get("obs_format", section=TECH).str  # TODO: This always falls back on config.where ..
        file_vars = config.create_file_vars(rundate, TECH, session=None)
        del file_vars["session"]  # TODO: Do not add None variables to file_vars?
        return files.glob_variable(
            f"vlbi_obs_{obs_format}", variable="session", pattern=r"\w{2}", file_vars=file_vars
        )

    # Look up sessions in the master schedule, honoring the runner's skip/type filters
    skip_sessions = set(
        config.where.get(
            "skip_sessions",
            section="runner",
            value=util.read_option_value("--skip_sessions", default=None),
            default="",
        ).list
    )
    session_types = config.where.get(
        "session_types",
        section="runner",
        value=util.read_option_value("--session_types", default=None),
        default="",
    ).list
    master_schedule = apriori.get("vlbi_master_schedule", rundate=rundate)
    scheduled = set(master_schedule.list_sessions(rundate, session_types=session_types))
    return scheduled - skip_sessions
def interactive(rundate, tech, session=""):
    """Read model run data and start an interactive session

    Read all datasets for the given rundate and techniques, and start an interactive IPython
    session.

    Args:
        rundate:  The model run date.
        tech:     String with the name of technique.
    """
    # Read data for all techniques
    config.init(rundate, tech, session=session)
    tech_vars = config.files.vars.copy()
    del tech_vars["rundate"]
    vars_dict = {tech: tech_vars}
    list_of_vars = []

    # Register filekey suffix
    filekey_suffix = config.tech.get("filekey_suffix", default="").list
    if filekey_suffix:
        files.use_filelist_profiles(*filekey_suffix)

    # Read data for all available sessions and stages, add them to the global namespace
    for stage in sorted(files.glob_variable("dataset_hdf5", "stage", ".+")):
        names, dset_ids = data.list_dataset_names_and_ids(rundate, stage=stage, **vars_dict[tech])
        for name, dset_id in zip(names, dset_ids):
            var_name = "_".join([tech, stage, name, str(dset_id)])
            # Short alias: technique initial + running count of aliases for that technique
            count = sum(1 for v in list_of_vars if v.lstrip().startswith(tech[0]))
            short_var_name = tech[0] + str(count)
            dset = data.Dataset(
                rundate, stage=stage, dataset_name=name, dataset_id=dset_id, **vars_dict[tech]
            )
            globals()[var_name] = dset
            globals()[short_var_name] = dset
            list_of_vars.append(f"{short_var_name:>6s}, {var_name}")

    # Start an interactive IPython session
    IPython.embed(header="Available datasets:\n" + "\n".join(list_of_vars))
def __init__(self, time, version=None):
    """Set up a new Terrestrial Reference Frame object based on the VTRF Sinex files

    Args:
        time (Time):    Time epochs for which to calculate positions.
        version (Str):  Version string, can be used to differentiate for instance VTRF2015d from
                        VTRF2016b. By adding a _snx or _ssc suffix to the version number the
                        format can be specificed.
    """
    super().__init__(time, version)

    # Parse solution and format from version (year_format); 'last' solution and Sinex format
    # are the defaults
    solution, _, fmt = ("last" if version is None else version).partition("_")
    self.solution = solution
    self.format = fmt or "snx"

    if self.solution == "last":
        # Pick the newest solution available on disk
        candidates = files.glob_variable(self.file_key_pattern.format(self.format), "version", r"[^._]*")
        if candidates:
            self.solution = max(candidates)
        else:
            log.fatal("No vtrf reference frame files found")

    self.version = f"{self.solution}_{self.format}"
def __init__(self, time, version=None):
    """Set up a new Terrestrial Reference Frame object based on the ITRF Sinex files

    For ITRFs from 2014 and later includes post-seismic deformations.

    Args:
        time (Time):    Time epochs for which to calculate positions.
        version (Str):  Version string, can be used to differentiate for instance ITRF2008 from
                        ITRF2014.
    """
    super().__init__(time, version)

    # Parse solution and format from version (solution_format); 'last' solution and Sinex
    # format are the defaults
    solution, _, fmt = ("last" if version is None else version).partition("_")
    self.solution = solution
    self.format = fmt or "snx"

    if self.solution == "last":
        # Pick the newest solution available on disk
        candidates = files.glob_variable(self.file_key_pattern.format(self.format), "version", r"[^._]*")
        if candidates:
            self.solution = max(candidates)
        else:
            log.fatal("No itrf reference frame files found")

    self.version = f"{self.solution}_{self.format}"
def file_vars():
    """File variables that will be available during the running of this technique

    In addition, date and analysis variables are available.

    Returns:
        Dict: File variables special for this technique.
    """
    file_vars = dict()
    # Hoisted: read the observation format once instead of once per branch
    obs_format = config.tech.get("obs_format").str

    # Add obs_version for ngs
    if obs_format == "ngs":
        versions = files.glob_variable("vlbi_obs_ngs", "obs_version", r"\d{3}")
        if versions:
            file_vars["obs_version"] = max(versions)
        elif config.where.files.download_missing.bool:
            # Look online for a candidate, trying version numbers 009 down to 004
            log.info("No NGS observation file found on disk: Looking for one online.")
            obs_versions = [f"{v:03d}" for v in reversed(range(4, 10))]
            for obs_version in obs_versions:
                url = files.url(
                    "vlbi_obs_ngs", file_vars=dict(obs_version=obs_version), is_zipped=True, use_aliases=False
                )
                log.info(f"Looking for {url} ...")
                if url.exists():
                    file_vars["obs_version"] = obs_version
                    break
        if not file_vars:
            log.fatal("No NGS observation file found")

    # Add obs_version for vgosdb
    if obs_format == "vgosdb":
        versions = files.glob_variable("vlbi_obs_vgosdb", "obs_version", r"\d{3}")
        if versions:
            file_vars["obs_version"] = max(versions)
        elif config.where.files.download_missing.bool:
            # Downloading VGOSDB wrappers is not implemented yet
            log.warn("No VGOSDB wrapper file found. Not attempting to download. TODO")
        if not file_vars:
            log.fatal("No VGOSDB observation file found")

    # Sinex file vars
    file_vars["solution"] = config.tech.sinex.solution.str
    file_vars["file_agency"] = config.tech.sinex.file_agency.str.lower()

    return file_vars
def update(self):
    """Read users from the file directories"""
    found_users = files.glob_variable("dataset_hdf5", "user", r"[a-z]+")
    simple_update_combobox(self, sorted(found_users))