Example #1
    def _read_data(self):
        """Read data needed by this Celestial Reference Frame for calculating positions of sources

        Returns:
            Dict:  Dictionary containing data about each source defined in this reference frame.
        """
        data = parsers.parse_key(file_key="icrf2_non_vcs").as_dict()
        data.update(parsers.parse_key(file_key="icrf2_vcs_only").as_dict())

        return data
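The dict.update call above means the icrf2_vcs_only entries overwrite any duplicate source IDs from icrf2_non_vcs. A minimal self-contained sketch of that merge order, with made-up source IDs and coordinates:

non_vcs = {"0003-066": {"ra": 0.0146, "dec": -0.1123}}
vcs_only = {"0003-066": {"ra": 0.0147, "dec": -0.1124},
            "0005+383": {"ra": 0.0350, "dec": 0.6740}}

data = dict(non_vcs)
data.update(vcs_only)   # the later catalog wins on duplicate keys
assert data["0003-066"]["ra"] == 0.0147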
Example #2
    def _read_data(self):
        """Read data needed by this Reference Frame for calculating positions of sites

        Delegates to _read_data_<self.format> to read the actual data.

        Returns:
            Dict:  Dictionary containing data about each site defined in this reference frame.
        """
        data = parsers.parse_key(file_key="icrf2_non_vcs").as_dict()
        data.update(parsers.parse_key(file_key="icrf2_vcs_only").as_dict())

        return data
Example #3
def get_gravitational_deformation(rundate):
    """Get excess delay due to gravitational deformation as a function of elevation
    Returns:
        A dictionary of interpolator functions.
    """

    versions = list(
        config.files.glob_variable("vlbi_gravitational_deformation", "version",
                                   r"[\w]+"))

    dates = [datetime.strptime(d, "%Y%b%d") for d in versions]
    max_idx = dates.index(max(dates))
    file_vars = dict(version=versions[max_idx])
    data = parsers.parse_key(file_key="vlbi_gravitational_deformation",
                             file_vars=file_vars).as_dict()

    interpolators = dict()

    for station, values in data.items():
        if (datetime.combine(rundate, time.max) > values["start"]
                and datetime.combine(rundate, time.min) < values["end"]):
            interpolators[station] = interpolate.interp1d(values["elevation"],
                                                          values["delay"],
                                                          kind="cubic")

    return interpolators
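A self-contained sketch of the per-station interpolator built above, with made-up elevation (radians) and delay samples; the returned function maps elevation angle to excess delay:

import numpy as np
from scipy import interpolate

elevation = np.radians([5, 10, 20, 40, 60, 90])       # sample elevation angles
delay = np.array([40.0, 28.0, 15.0, 6.0, 2.0, 0.0])   # corresponding delays
interp = interpolate.interp1d(elevation, delay, kind="cubic")
print(interp(np.radians(30)))  # interpolated delay at 30 degrees elevation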
Example #4
def get_gravity_coefficients(gravity_field, truncation_level, rundate):
    """Get coefficients for a gravity field

    The coefficient files come with normalized gravity coefficients.

    The coefficients are returned in a dict with keys `C` and `S`,
    each containing a table of coefficients `C[i, j]` and `S[i, j]`.

    Args:
        gravity_field:    Name of gravity field.
        truncation_level: Level of degree and order where coeffs are truncated.
        rundate:          Start time of integration.

    Returns:
        A dictionary containing C and S coefficients.
    """
    log.info(f"Reading gravity field {gravity_field} up to degree and order {truncation_level}")
    try:
        gravity_parser = parsers.parse_key(
            "gravity_coefficients", file_vars=dict(gravity_field=gravity_field), num_degrees=truncation_level
        )
    except FileNotFoundError:
        log.fatal(f"Unknown gravity field {gravity_field}, exiting!")

    gravity_coefficients = gravity_parser.as_dict()
    if gravity_field == "egm2008":  # TODO: Why do we treat this differently ... Should be done by properties not name
        apply_rates(gravity_coefficients, truncation_level, rundate)

    return gravity_coefficients
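A hypothetical usage sketch, assuming a configured where environment with the egm2008 coefficient files in place; the C and S tables are indexed by degree and order:

from datetime import date

coeffs = get_gravity_coefficients("egm2008", 20, date(2020, 1, 1))
c20 = coeffs["C"][2, 0]  # normalized coefficient of degree 2, order 0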
Example #5
def get_terrapos_position():
    """Get Terrapos position Dataset by reading Terrapos position output file

    Returns:
        dset (Dataset): Dataset with following fields:

        ====================  ==================  =================================================================
         Field                 Type                Description
        ====================  ==================  =================================================================
         gpsweek               numpy.ndarray       GPS week
         gpssec                numpy.ndarray       Seconds of GPS week
         head                  numpy.ndarray       Head in [deg]
         height                numpy.ndarray       Ellipsoidal height in [m]
         lat                   numpy.ndarray       Latitude in [deg]
         lon                   numpy.ndarray       Longitude in [deg]
         num_sat               numpy.ndarray       Number of satellites
         pdop                  numpy.ndarray       Position Dilution of Precision (PDOP)
         pitch                 numpy.ndarray       Pitch in [deg]
         reliability_east      numpy.ndarray       East position external reliability in [m] #TODO: Is that correct?
         reliability_height    numpy.ndarray       Height position external reliability in [m] #TODO: Is that correct?
         reliability_north     numpy.ndarray       North position external reliability in [m] #TODO: Is that correct?
         roll                  numpy.ndarray       Roll in [deg]
         sigma_east            numpy.ndarray       Standard deviation of East position in [m] #TODO: Is that correct?
         sigma_height          numpy.ndarray       Standard deviation of Height position in [m] #TODO: Is that correct?
         sigma_north           numpy.ndarray       Standard deviation of North position in [m] #TODO: Is that correct?
         site_pos              PositionTable       PositionTable object with given station coordinates
         time                  TimeTable           Observation time given as TimeTable object
        ====================  ==================  =================================================================
    """
    dset = parsers.parse_key("terrapos_output_position").as_dataset()
    dset.add_time("time", val=_get_time(dset), scale="gps")
    dset.add_position("site_pos", time="time", itrs=_get_site_pos(dset))

    return dset
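A hypothetical usage sketch, assuming the Terrapos position output file is configured; the field names are those listed in the table above:

dset = get_terrapos_position()
print(dset.lat[:5], dset.lon[:5], dset.height[:5])  # first few epochs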
Example #6
def get_ephemeris(rundate, sat_name):
    """Get CPF data for a given date

    The ephemeris data is stored in a dictionary with tables of times and position under the "positions" key.
    This table is interpolated in the calculate_initial_values method.

    Args:
        rundate (Datetime):   Model run date.
        sat_name (String):    Name of satellite.

    Returns:
        Dict: Ephemeris data.
    """
    file_key = "slr_ephemeris"
    ephemeris_data = get_satellite_vars(sat_name)
    provider_list = config.tech.prediction_providers.list
    # Find the latest version of the observation file
    versions = config.files.glob_variable(file_key, "version", r"\d{4}", file_vars=ephemeris_data)

    try:
        ephemeris_data["version"] = sorted(versions)[-1]
        providers = config.files.glob_variable(file_key, "provider", r"\w+", file_vars=ephemeris_data)
        for provider in provider_list:
            if provider in providers:
                ephemeris_data["provider"] = provider
                break
        else:
            log.fatal(f"No valid provider found: {', '.join(providers)}")
    except IndexError:
        log.info(f"Pattern: '{config.files.path(file_key)}'")
        log.info(f"No ephemeris data found")
        log.fatal(f"Download manually from ftp://cddis.nasa.gov/slr/cpf_predicts/")
    eph_parser = parsers.parse_key(file_key, file_vars=ephemeris_data)
    eph = calculate_initial_values(eph_parser.as_dict(), rundate)

    return eph
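A self-contained sketch of the provider selection above: Python's for/else runs the else block only if the loop finishes without break, so the first configured provider that is actually found on disk wins (provider names here are made up):

provider_list = ["sgf", "hts"]   # hypothetical priority order from config
providers = {"hts", "dgfi"}      # hypothetical providers found on disk

for provider in provider_list:
    if provider in providers:
        print(f"Using provider {provider}")
        break
else:
    raise RuntimeError(f"No valid provider found: {', '.join(providers)}")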
Example #7
def get_terrapos_residual():
    """Get Terrapos residual Dataset by reading Terrapos residual output file

    Returns:
        dset (Dataset): Dataset with following fields:

        ====================  ==================  =================================================================
         Field                 Type                Description
        ====================  ==================  =================================================================
         azimuth               numpy.ndarray       Azimuth of satellites in [deg]
         elevation             numpy.ndarray       Elevation of satellites in [deg]
         gpsweek               numpy.ndarray       GPS week
         gpssec                numpy.ndarray       Seconds of GPS week
         residual_code         numpy.ndarray       Code (pseudorange) residuals in [m]
         residual_doppler      numpy.ndarray       Doppler residuals in [m]
         residual_phase        numpy.ndarray       Carrier-phase residuals in [m]
         satellite             numpy.ndarray       Satellite PRN number together with GNSS identifier (e.g. G07)
         system                numpy.ndarray       GNSS identifier
         time                  TimeTable           Observation time given as TimeTable object
        ====================  ==================  =================================================================

    """
    dset = parsers.parse_key("terrapos_output_residual").as_dataset()
    dset.add_time("time", val=_get_time(dset), scale="gps")

    return dset
Example #8
    def __init__(self, file_key="gnss_antex"):
        """Set up a new GNSS antenna correction object by parsing ANTEX file

        The parsing is done by :mod:`where.parsers.gnss_antex`.
        """
        parser = parsers.parse_key(file_key)
        self.data = parser.as_dict()
        self.file_path = parser.file_path
Example #9
    def __init__(self, rundate, file_key="gnss_sinex_bias"):
        """Set up a new GNSS bias object by parsing SINEX bias file

        The parsing is done by :mod:`where.parsers.gnss_sinex_bias`.
        """
        parser = parsers.parse_key(file_key=file_key,
                                   file_vars=config.date_vars(rundate))
        self.data = parser.as_dict()
Example #10
    def _read_data(self):
        """Read data needed by this Reference Frame for calculating positions of sites

        Delegates to _read_data_<self.format> to read the actual data.

        Returns:
            Dict:  Dictionary containing data about each site defined in this reference frame.
        """
        return parsers.parse_key(file_key="vascc_crf").as_dict()
Example #11
File: eop.py  Project: yxw027/where
    def __init__(self,
                 eop_data,
                 time,
                 models=None,
                 pole_model=None,
                 window=4,
                 sources=None):
        """Create an Eop-instance that calculates EOP corrections for the given time epochs

        The interpolation window is based on https://hpiers.obspm.fr/iers/models/interp.f which uses 4 days.

        Args:
            eop_data (Dict): Dictionary of tabular EOP data, typically read from file.
            time (Time):     Time epochs for which to calculate EOPs.
            models (Tuple):  Optional tuple of EOP correction models. If not given, the config setting is used.
            pole_model (String): Optional name of pole model. If not given, the config setting is used.
            window (Int):    Number of days to use as interpolation window.
            sources (Tuple): Optional tuple of EOP data sources used when picking data.
        """
        if time.scale == "ut1":
            raise ValueError(f"Time scale of 'time' cannot be 'ut1'")
        self.window = window
        self.time = time
        self.sources = sources
        self.data = self.pick_data(eop_data, self.time, self.window, sources)
        self.calculate_leap_second_offset()

        # Figure out which correction models to use
        self.models = config.tech.eop_models.tuple if models is None else models

        if "rg_zont2" in self.models:
            self.remove_low_frequency_tides()

        # Figure out which pole model to use:
        self.pole_model = config.tech.get("eop_pole_model",
                                          value=pole_model,
                                          default=None).str
        if self.pole_model == "mean_2015":
            # Read the tabulated data needed for the model
            data = parsers.parse_key("eop_mean_pole_2015").as_dict()
            self.mean_pole_last_idx = len(data["year"]) - 1
            self.mean_pole_years = interpolate.interp1d(
                data["year"],
                data["year"],
                kind="previous",
                fill_value="extrapolate")
            self.mean_pole_idx = interpolate.interp1d(data["year"],
                                                      range(len(data["year"])),
                                                      kind="previous",
                                                      fill_value=np.nan,
                                                      bounds_error=False)
            self.mean_pole_x = interpolate.interp1d(range(len(data["x"])),
                                                    data["x"],
                                                    kind="previous",
                                                    fill_value="extrapolate")
            self.mean_pole_y = interpolate.interp1d(range(len(data["y"])),
                                                    data["y"],
                                                    kind="previous",
                                                    fill_value="extrapolate")
Example #12
def get_vlbi_master_schedule(rundate=None):
    """Read master file

    If rundate is not specified, use file_vars that are already set.
    """

    file_vars = None if rundate is None else config.date_vars(rundate)
    parser = parsers.parse_key("vlbi_master_file", file_vars=file_vars)

    return VlbiMasterSchedule(parser.as_dict(), file_vars["yyyy"])
Example #13
    def _read_data(self):
        """Read data needed by this Celestial Reference Frame for calculating positions of sources

        Returns:
            Dict:  Dictionary containing data about each source defined in this reference frame.
        """
        data = parsers.parse_key(
            file_key="icrf3", file_vars=dict(catalog=self.catalog)).as_dict()

        return data
Example #14
def get_atmospheric_tides():
    """Create interpolators for atmospheric tides coefficients

    Reads the atmospheric tides coefficients from file using the
    AtmosphericTidesParser and creates a RectBivariateSpline interpolator in
    longitude and latitude for each coefficient type.

    Longitude is given from 0 to 360 degrees, while the interpolator should
    work for longitudes from -180 to 180 degrees. The dataset is therefore
    shifted accordingly. The RectBivariateSpline requires longitude and
    latitude to be in strictly increasing order.

    Returns:
        A dictionary of interpolator functions.
    """
    model = config.tech.atmospheric_tides.str
    file_key = "atmospheric_tides_" + model if model else "atmospheric_tides"
    at_data = parsers.parse_key(file_key=file_key).as_dict()

    interpolators = dict()
    lon = np.array(at_data.pop("lon"))
    lat = np.array(at_data.pop("lat"))
    lon = np.unique(lon)
    _, idx = np.unique(lat, return_index=True)

    # Restore original order
    lat = lat[np.sort(idx)]

    num_value = (len(lat), len(lon))

    # Latitude is given from 90 degrees to -90 degrees
    lat = lat[::-1]
    # Strip the last longitude to avoid double entry for 0 degrees
    lon = lon[:-1]
    # Shift longitude -180 degrees
    lon = (lon + 180) % 360 - 180
    idx = lon.argsort()
    lon = lon[idx]

    lat = np.radians(lat)
    lon = np.radians(lon)

    for coeff in at_data.keys():
        values = np.array(at_data[coeff]).reshape(num_value)

        # Latitude is given from 90 degrees to -90 degrees
        values = values[::-1, :]
        # Strip the last longitude to avoid double entry for 0 degrees
        values = values[:, :-1]
        # Shift longitude -180 degrees
        values = values[:, idx]

        interpolators[coeff] = RectBivariateSpline(lon, lat, values.T)

    return interpolators
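A self-contained sketch of the longitude shift above: a 0-360 degree grid is remapped to -180..180 and the columns are reordered with the same index array, keeping grid and values aligned and strictly increasing as RectBivariateSpline requires:

import numpy as np

lon = np.arange(0.0, 360.0, 90.0)   # [0, 90, 180, 270]
values = np.arange(4.0)             # one value per longitude column
lon = (lon + 180) % 360 - 180       # [0, 90, -180, -90]
idx = lon.argsort()                 # reorder to strictly increasing
print(lon[idx], values[idx])        # [-180, -90, 0, 90] and [2, 3, 0, 1]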
Example #15
File: slr.py  Project: uasau/where
def write_to_dataset(dset, rundate=None, obs_format=None, **obs_args):

    obs_format = config.tech.get("obs_format", section=TECH, value=obs_format).str
    log.info(f"Reading observation file in {obs_format} format")

    file_vars1 = config.create_file_vars(rundate, TECH, **obs_args)
    last_date_to_read = rundate + timedelta(days=config.tech.arc_length.float + 1)
    parser1 = parsers.parse_key(f"slr_obs_{obs_format}", file_vars1)
    file_vars2 = config.create_file_vars(last_date_to_read, TECH, **obs_args)
    parser2 = parsers.parse_key(f"slr_obs_{obs_format}", file_vars2)

    if parser1.data_available and parser2.data_available:
        data = _write_to_dataset(parser1, parser2, dset, rundate)
        _write_met_to_dataset(dset, data, rundate)
    elif parser1.data_available and not parser2.data_available:
        raise exceptions.MissingDataError(
            f"No observation file in {obs_format} format found for {last_date_to_read.month}"
        )
    else:
        raise exceptions.MissingDataError(f"No observation file in {obs_format} format found for {rundate}")
Example #16
def get_vmf1_grid(time):
    """Read VMF1 gridded data files relevant for the given time epochs

    Args:
        time (Time):    observation epochs

    Returns:
        A dictionary of functions that can interpolate in the VMF1 dataset.
    """
    data = dict()
    min_time = time.utc.datetime if len(time) == 1 else min(time.utc.datetime)
    max_time = time.utc.datetime if len(time) == 1 else max(time.utc.datetime)
    start_hour = 6 * (min_time.hour // 6)
    start = min_time.replace(hour=start_hour,
                             minute=0,
                             second=0,
                             microsecond=0)
    end_hour = 6 * (max_time.hour // 6)
    end = max_time.replace(hour=end_hour, minute=0, second=0,
                           microsecond=0) + timedelta(hours=6)

    for datatype, multiplier in DATATYPE.items():
        dt_to_read = start
        vmf1_data = dict()

        while dt_to_read <= end:
            file_vars = dict(config.date_vars(dt_to_read), type=datatype)
            data_chunk = parsers.parse_key(file_key="vmf1_grid",
                                           file_vars=file_vars).as_dict()
            if data_chunk:
                vmf1_data[dt_to_read] = (data_chunk["lat"], data_chunk["lon"],
                                         data_chunk["values"] * multiplier)
            dt_to_read += timedelta(hours=6)
        data[datatype] = vmf1_data

    funcs = {k: vmf1_interpolator(v) for k, v in data.items()}

    data = parsers.parse_key(file_key="orography_ell").as_dict()
    funcs["ell"] = RectBivariateSpline(data["lon"], data["lat"],
                                       data["values"].T)
    return funcs
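A self-contained sketch of the epoch gridding above: observation times are bracketed by the 6-hourly VMF1 grid epochs at 00, 06, 12 and 18 UTC:

from datetime import datetime, timedelta

obs = datetime(2021, 3, 14, 15, 9)
start = obs.replace(hour=6 * (obs.hour // 6), minute=0, second=0, microsecond=0)
end = start + timedelta(hours=6)
print(start, end)  # 2021-03-14 12:00:00 and 2021-03-14 18:00:00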
Example #17
def get_eccentricity(rundate):
    """Get Eccentricities for a given date

    Args:
        rundate: The run date of the data analysis.

    Returns:
        A dictionary of eccentricities.
    """

    data = parsers.parse_key(file_key="eccentricity").as_dict()
    return Eccentricity(data, rundate)
Example #18
def get_center_of_mass(sat_name):
    """Read station-dependent center of mass corrections from file

    Args:
        sat_name (String):  Name of satellite.

    Returns:
        Dict:  Center of mass corrections per station for the given satellite.
    """
    satellite = sat_name.rstrip("12")
    parser = parsers.parse_key("slr_center_of_mass",
                               file_vars=dict(satellite=satellite))
    return SlrCenterOfMass(parser.as_dict(), satellite)
Example #19
def get_ocean_tides():
    """Get ocean tidal loading coefficients

    Reads ocean tidal coefficients from file using OceanTidesFes2004Parser for satellite
    displacements.

    Returns:
        A dictionary with information about ocean tidal coefficients.
    """
    model = config.tech.orbit_ocean_tides.str
    file_key = f"ocean_tides_{model}" if model else "ocean_tides"

    return parsers.parse_key(file_key).as_dict()
Example #20
def get_ocean_tides():
    """Get ocean tidal loading coefficients

    Reads ocean tidal loading from file using OceanTidesParser for station
    displacements.

    Returns:
        A dictionary with information about ocean tidal loading coefficients.
    """
    ocean_tides_model = config.tech.ocean_tides.str
    file_key = "ocean_tides_{}".format(
        ocean_tides_model) if ocean_tides_model else "ocean_tides"

    return parsers.parse_key(file_key=file_key).as_dict()
Example #21
    def _read_data_ngs(self):
        """Read data from NGS observation files

        Returns:
            Dict:  Dictionary containing data about each source defined in this celestial reference frame.
        """
        data = parsers.parse_key("vlbi_obs_ngs",
                                 parser_name="vlbi_ngs_sources").as_dict()
        src_names = apriori.get("vlbi_source_names")

        # Replace IVS name of source with official IERS name
        return {
            src_names[ivsname]["iers_name"]
            if ivsname in src_names else ivsname: coords
            for ivsname, coords in data.items()
        }
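A self-contained sketch of the renaming pattern above, with made-up data: keys found in the lookup table are replaced by their official IERS designation, unknown keys pass through unchanged:

src_names = {"OJ287": {"iers_name": "0851+202"}}
data = {"OJ287": (2.2, 0.35), "UNKNOWN-1": (1.0, -0.5)}
renamed = {
    src_names[ivsname]["iers_name"] if ivsname in src_names else ivsname: coords
    for ivsname, coords in data.items()
}
print(renamed)  # {'0851+202': (2.2, 0.35), 'UNKNOWN-1': (1.0, -0.5)}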
Example #22
    def _read_data_vgosdb(self):
        """Read data from vgosdb observation files

        Returns:
            Dict:  Dictionary containing data about each source defined in this celestial reference frame.
        """
        source_names = apriori.get("vlbi_source_names")
        data = parsers.parse_key("vlbi_obs_sources_vgosdb").as_dict()

        # Replace IVS name of source with official IERS name
        return {
            source_names[ivsname]["iers_name"]
            if ivsname in source_names else ivsname: dict(ra=coord[0],
                                                          dec=coord[1])
            for ivsname, coord in zip(data["AprioriSourceList"],
                                      data["AprioriSource2000RaDec"])
        }
Example #23
def get_ephemeris(rundate, sat_name):
    """Get CPF data for a given date

    The ephemeris data is stored in a dictionary with tables of times and position under the "positions" key.
    This table is interpolated in the calculate_initial_values method.

    Args:
        rundate (Datetime):   Model run date.
        sat_name (String):    Name of satellite.

    Returns:
        Dict: Ephemeris data.
    """
    file_key = "slr_ephemeris"
    sat_data = get_satellite_vars(sat_name)
    provider_list = config.tech.prediction_providers.list

    # Find the latest version of the observation file
    versions = files.glob_variable(file_key,
                                   "version",
                                   r"\d{4}",
                                   file_vars=sat_data)
    ephemeris_data = dict()

    try:
        ephemeris_data["version"] = sorted(versions)[-1]
        providers = files.glob_variable(file_key,
                                        "provider",
                                        r"\w+",
                                        file_vars=ephemeris_data)
        for provider in provider_list:
            if provider in providers:
                ephemeris_data["provider"] = provider
                break
        else:
            log.fatal(f"No valid provider found: {', '.join(providers)}")
    except IndexError:
        log.info(f"Pattern: '{files.path(file_key)}'")
        log.info(f"No ephemeris data found")
        log.fatal(
            f"Download manually from ftp://cddis.nasa.gov/slr/cpf_predicts/")

    eph_parser = parsers.parse_key(file_key, file_vars=ephemeris_data)
    eph = calculate_initial_values(eph_parser.as_dict(), rundate)

    return eph
Example #24
def get_ephemeris(rundate, sat_name):
    """Get CPF data for a given date

    The ephemeris data is stored in a dictionary with tables of times and position under the "positions" key.
    This table is interpolated in the calculate_initial_values method.

    Args:
        rundate (Datetime):   Model run date.
        sat_name (String):    Name of satellite.

    Returns:
        Dict: Ephemeris data.
    """
    file_key = "slr_ephemeris"
    sat_data = get_satellite_vars(sat_name)

    provider_list = config.tech.prediction_providers.list
    # Find the latest version of the observation file
    versions = files.glob_variable(file_key,
                                   "version",
                                   r"\d{4}",
                                   file_vars=sat_data)
    ephemeris_data = dict()

    try:
        ephemeris_data["version"] = sorted(versions)[-1]
        providers = files.glob_variable(file_key,
                                        "provider",
                                        r"\w+",
                                        file_vars=ephemeris_data)
        for provider in provider_list:
            if provider in providers:
                ephemeris_data["provider"] = provider
                break
    except IndexError:
        print(f"Pattern: '{files.path(file_key)}'"
              )  # TODO: Because of format log does not print this properly
        log.fatal(f"No ephemeris data found")

    eph = parsers.parse_key(file_key,
                            file_vars=ephemeris_data,
                            rundate=rundate)
    eph = calculate_initial_values(eph)

    return eph
Example #25
File: eop.py  Project: mfkiwl/where
def get_eop(time, models=None, pole_model=None, window=4, sources=None, remove_leap_seconds=None):
    """Get EOP data for the given time epochs

    Args:
        time (Time):   Time epochs for which to calculate EOPs.
        models (Tuple): Optional tuple of EOP models. If not given, the config setting is used.
        pole_model (String): Optional name of pole model. If not given, the config setting is used.
        window (Int): Number of days to use as interpolation window.
        sources (List): Optional list of EOP data sources. If not given, the config setting is used.
        remove_leap_seconds (Bool): Optional flag, passed on to the Eop object.

    Returns:
        Eop: Object that calculates EOP corrections.

    """
    if not _EOP_DATA:

        sources = sources if sources else config.tech.eop_sources.list
        for source in sources:
            _EOP_DATA.setdefault(source, {}).update(parsers.parse_key(file_key=f"eop_{source}").as_dict())

    return Eop(_EOP_DATA, time, models=models, pole_model=pole_model, window=window, sources=sources, remove_leap_seconds=remove_leap_seconds)
Example #26
def get_solar_flux(rundate):
    """Read time-dependent solar flux from file
    """
    flux = parsers.parse_key(file_key="solar_flux").as_dict()
    arc_length = config.tech.arc_length.int

    date_to_add = rundate - timedelta(days=5)
    time_list, flux_list = list(), list()

    while True:
        if date_to_add in flux:
            # Note that 72000 sec corresponds to local noontime in Penticton
            time_list.append((date_to_add - rundate).total_seconds() + 72000)
            flux_list.append(flux[date_to_add])
            if date_to_add > rundate + timedelta(days=arc_length):
                break
        date_to_add += timedelta(days=1)

    return interpolate.interp1d(time_list, flux_list)
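A hypothetical usage sketch, assuming the solar flux table is available; the returned interpolator maps seconds relative to the run date to flux values:

from datetime import date

flux_of_t = get_solar_flux(date(2020, 1, 1))
print(flux_of_t(72000.0))  # flux at local noon in Penticton on the run date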
Example #27
    def _read_data_vgosdb(self):
        """Read data from vgosdb observation files

        Returns:
            Dict:  Dictionary containing data about each site defined in this reference frame.
        """
        station_codes = apriori.get("vlbi_station_codes")
        data = parsers.parse_key("vlbi_obs_stations_vgosdb").as_dict()
        try:
            stations = data["AprioriStationList"]
            xyz = data["AprioriStationXYZ"]
        except KeyError:
            return {}

        return {(station_codes[n]["cdp"]
                 if n in station_codes else "key{}".format(i)):
                (dict(name=n, pos=p, **station_codes[n])
                 if n in station_codes else dict(name=n, pos=p, real=False))
                for i, (n, p) in enumerate(zip(stations, xyz))}
Example #28
def get_ocean_pole_tides():
    """Get ocean pole tide coefficients

    Read coefficients from file using OceanPoleTidesParser and construct a
    RectBivariateSpline interpolator for each set of coefficients. Use longitude
    and latitude as data point coordinates for the interpolator functions.

    Longitude is given from 0 to 360 degrees, while the interpolator should
    work for longitudes from -180 to 180 degrees. The dataset is therefore
    shifted accordingly.

    Returns:
        A dictionary of interpolator functions.
    """
    opt_data = parsers.parse_key(file_key="ocean_pole_tides_cmc").as_dict()

    interpolators = dict()
    lon = np.array(opt_data.pop("lon"))
    lat = np.array(opt_data.pop("lat"))

    lon = np.unique(lon)
    lat = np.unique(lat)

    num_value = (len(lat), len(lon))

    # Shift longitude -180 degrees
    lon = (lon + 180) % 360 - 180
    idx = lon.argsort()
    lon = lon[idx]

    lat = np.radians(lat)
    lon = np.radians(lon)

    for coeff in opt_data.keys():
        values = np.array(opt_data[coeff]).reshape(num_value)

        # Shift longitude -180 degrees
        values = values[:, idx]

        interpolators[coeff] = RectBivariateSpline(lon, lat, values.T)

    return interpolators
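A self-contained sketch of the spline interpolators built above, on a made-up regular (lon, lat) grid in radians:

import numpy as np
from scipy.interpolate import RectBivariateSpline

lon = np.radians(np.arange(-180.0, 180.0, 30.0))
lat = np.radians(np.arange(-90.0, 91.0, 30.0))
values = np.random.default_rng(0).random((len(lon), len(lat)))
spline = RectBivariateSpline(lon, lat, values)
print(spline(np.radians(10.3), np.radians(59.9)))  # interpolated value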
Example #29
def write_to_dataset(dset,
                     rundate=None,
                     session=None,
                     obs_format=None,
                     **obs_args):
    obs_format = config.tech.get("obs_format", section=TECH,
                                 value=obs_format).str
    log.info(f"Reading observation file in {obs_format} format")

    file_vars = config.create_file_vars(rundate,
                                        TECH,
                                        session=session,
                                        **obs_args)
    parser = parsers.parse_key(f"vlbi_obs_{obs_format}", file_vars)

    if parser.data_available:
        _write_to_dataset(parser, dset, rundate, session)
    else:
        raise exceptions.MissingDataError(
            f"No observation file in {obs_format} format found for {rundate}")
Example #30
def get_eop(time, models=None, window=4, source=None):
    """Get EOP data for the given time epochs

    Read EOP data from the eopc04-files. Both files are read, with data from the regular IAU file
    (eopc04_IAU2000.62-now) being prioritized, see `_EOP_FILE_KEYS`.

    Args:
        time (Time):   Time epochs for which to calculate EOPs.
        models (Tuple): Optional tuple of EOP models. If not given, the config setting is used.

    Returns:
        Eop: Object that calculates EOP corrections.

    """
    # Read the extended and the regular EOP data file (overlapping dates are overwritten by the latter)
    if not _EOP_DATA:
        source = config.tech.get("eop_source", value=source).str
        for file_key in _EOP_FILE_KEYS[source]:
            _EOP_DATA.update(parsers.parse_key(file_key=file_key).as_dict())

    return Eop(_EOP_DATA, time, models=models, window=window)
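A hypothetical usage sketch, assuming a configured where environment with EOP files downloaded; time is a where Time object of observation epochs, and ut1_utc is assumed to be one of the correction attributes exposed by Eop:

eop = get_eop(time)
ut1_utc = eop.ut1_utc  # assumed attribute: interpolated UT1-UTC at the epochs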