Example #1
def populate_data(dog, start_date, duration, stations):
    station_locs = {}
    station_data = {}
    for station in stations:
        print(station)
        cache_name = "cached/stationdat_%s_%s_to_%s" % (
            station, start_date.strftime("%Y-%m-%d"),
            (start_date + duration).strftime("%Y-%m-%d"))
        if os.path.exists(cache_name):
            with open(cache_name, "rb") as f:
                station_data[station] = pickle.load(f)
            station_locs[station] = get_station_position(
                station, cache_dir=dog.cache_dir)
            continue

        # one per-satellite time series for each GPS PRN (G01 through G32)
        station_data[station] = {
            'G%02d' % i: defaultdict(empty_factory)
            for i in range(1, 33)
        }
        date = start_date
        while date < start_date + duration:
            try:
                loc, data = data_for_station(dog, station, date)
                station_data[station] = station_transform(
                    data,
                    start_dict=station_data[station],
                    offset=int((date - start_date).total_seconds() / 30))
                station_locs[station] = loc
            except (ValueError, DownloadError):
                print("*** error with station " + station)
            date += timedelta(days=1)
        os.makedirs("cached", exist_ok=True)
        with open(cache_name, "wb") as f:
            pickle.dump(station_data[station], f)
    return station_locs, station_data
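
A minimal usage sketch for the function above (assumptions: laika's AstroDog, the standard-library datetime/timedelta, and an illustrative pair of CORS station names):

from datetime import datetime, timedelta
from laika import AstroDog

dog = AstroDog()
start_date = datetime(2020, 1, 11)
duration = timedelta(days=2)
stations = ['slac', 'sc01']  # illustrative CORS station names

station_locs, station_data = populate_data(dog, start_date, duration, stations)
print(station_locs['sc01'])  # station position as returned by data_for_station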
Example #2
    def test_station_position(self):
        print(
            'WARNING: THIS CAN TAKE A VERY LONG TIME ON THE FIRST RUN DUE TO DOWNLOADS'
        )
        dog = AstroDog()
        # Building this cache takes forever; just copy it from the repo
        cache_directory = '/tmp/gnss/cors_coord/'
        os.makedirs(cache_directory, exist_ok=True)

        examples_directory = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), '../examples')
        copyfile(os.path.join(examples_directory, 'cors_station_positions'),
                 os.path.join(cache_directory, 'cors_station_positions'))

        station_name = 'sc01'
        time = GPSTime.from_datetime(datetime(2020, 1, 11))
        rinex_obs_file = download_cors_station(time, station_name,
                                               dog.cache_dir)
        obs_data = RINEXFile(rinex_obs_file)
        sc01_exact_position = get_station_position('sc01')

        rinex_meas_grouped = raw.read_rinex_obs(obs_data)
        rinex_corr_grouped = []
        for meas in tqdm(rinex_meas_grouped):
            proc = raw.process_measurements(meas, dog=dog)
            corr = raw.correct_measurements(proc, sc01_exact_position, dog=dog)
            rinex_corr_grouped.append(corr)

        # Using laika's WLS solver we can now calculate position
        # fixes for every epoch (every 30s) over 24h.
        ests = []
        for corr in tqdm(rinex_corr_grouped):
            fix, _ = raw.calc_pos_fix(corr)
            ests.append(fix)
        ests = np.array(ests)

        mean_fix = np.mean(ests[:, :3], axis=0)
        np.testing.assert_allclose(mean_fix,
                                   sc01_exact_position,
                                   rtol=0,
                                   atol=1)
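
As a sanity check on the tolerance above: atol=1 bounds each ECEF component of the mean fix to within one meter of the surveyed position. The overall error can also be inspected directly; a minimal sketch using standard NumPy:

err_m = np.linalg.norm(mean_fix - sc01_exact_position)
print('mean WLS fix is %.2f m from the surveyed position' % err_m)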
Example #3
def data_for_station(dog, station_name, date=None):
    """
    Get data from a particular station and time.
    Station names are CORS names (e.g. 'slac').
    Dates are datetimes (e.g. datetime(2020, 1, 7)).
    """
    if date is None:
        date = datetime(2020, 1, 7)
    time = GPSTime.from_datetime(date)
    rinex_obs_file = download_cors_station(time, station_name, dog.cache_dir)

    obs_data = RINEXFile(rinex_obs_file)
    station_pos = get_station_position(station_name)
    return station_pos, raw.read_rinex_obs(obs_data)
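
A minimal usage sketch (assuming an AstroDog instance as in the other examples; 'slac' is one of the CORS names mentioned in the docstring):

dog = AstroDog()
station_pos, obs_grouped = data_for_station(dog, 'slac', datetime(2020, 1, 7))
print(station_pos)       # station position
print(len(obs_grouped))  # one group of measurements per observed epoch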
Example #4
    def run_station_position(self, length):
        dog = AstroDog()
        # Building this cache takes forever; just copy it from the repo
        cache_directory = '/tmp/gnss/cors_coord/'
        os.makedirs(cache_directory, exist_ok=True)

        examples_directory = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), '../examples')
        copyfile(os.path.join(examples_directory, 'cors_station_positions'),
                 os.path.join(cache_directory, 'cors_station_positions'))

        station_name = 'sc01'
        time = GPSTime.from_datetime(datetime(2020, 1, 11))
        rinex_obs_file = download_cors_station(time, station_name,
                                               dog.cache_dir)
        obs_data = RINEXFile(rinex_obs_file)
        sc01_exact_position = get_station_position('sc01')

        rinex_meas_grouped = raw.read_rinex_obs(obs_data)
        # Select small sample out of ~2800 to reduce computation time
        rinex_meas_grouped = rinex_meas_grouped[:length]
        rinex_corr_grouped = []
        for meas in tqdm(rinex_meas_grouped):
            proc = raw.process_measurements(meas, dog=dog)
            corr = raw.correct_measurements(proc, sc01_exact_position, dog=dog)
            rinex_corr_grouped.append(corr)

        # Using laika's WLS solver we can now calculate position
        # fixes for each selected epoch (one every 30s).
        ests = []
        for corr in tqdm(rinex_corr_grouped):
            ret = raw.calc_pos_fix(corr)
            if len(ret) > 0:
                fix, _ = ret
                ests.append(fix)
        ests = np.array(ests)

        mean_fix = np.mean(ests[:, :3], axis=0)
        np.testing.assert_allclose(mean_fix,
                                   sc01_exact_position,
                                   rtol=0,
                                   atol=1)
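
A minimal sketch of how a test might invoke the helper above (the test name is hypothetical; 10 epochs of 30 s data cover about five minutes):

    def test_station_position_short(self):
        self.run_station_position(10)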
Example #5
    def populate_data(self):
        self.station_locs = {}
        self.station_data = {}
        # use a set so a station that fails on several days is only
        # removed from self.stations once at the end
        bad_stations = set()
        for station in self.stations:
            print(station)
            cache_name = "cached/stationdat_%s_%s_to_%s" % (
                station, self.start_date.strftime("%Y-%m-%d"),
                (self.start_date + self.duration).strftime("%Y-%m-%d"))
            if os.path.exists(cache_name):
                with open(cache_name, "rb") as f:
                    self.station_data[station] = pickle.load(f)
                try:
                    self.station_locs[station] = get_station_position(
                        station, cache_dir=self.dog.cache_dir)
                except KeyError:
                    self.station_locs[station] = numpy.array(
                        extra_station_info[station])
                continue

            self.station_data[station] = {
                prn: defaultdict(empty_factory)
                for prn in satellites
            }
            date = self.start_date
            while date < self.start_date + self.duration:
                try:
                    loc, data = data_for_station(self.dog, station, date)
                    self.station_data[station] = station_transform(
                        data,
                        start_dict=self.station_data[station],
                        offset=int(
                            (date - self.start_date).total_seconds() / 30))
                    self.station_locs[station] = loc
                except (ValueError, DownloadError):
                    print("*** error with station " + station)
                    bad_stations.add(station)
                date += timedelta(days=1)
            os.makedirs("cached", exist_ok=True)
            with open(cache_name, "wb") as f:
                pickle.dump(self.station_data[station], f)
        for bad_station in bad_stations:
            self.stations.remove(bad_station)
Example #6
def data_for_station(dog, station_name, date):
    """
    Get data from a particular station and time. Wraps a number of laika function calls.
    Station names are CORS names (eg: 'slac')
    Dates are datetimes (eg: datetime(2020,1,7))
    """
    time = GPSTime.from_datetime(date)
    rinex_obs_file = None

    # handlers for specific networks
    handlers = {'Korea': download_korean_station}

    network = station_network_info.get(station_name)

    # no special network, so try the CORS download first
    if network is None:
        try:
            station_pos = get_station_position(station_name,
                                               cache_dir=dog.cache_dir)
            rinex_obs_file = download_cors_station(time,
                                                   station_name,
                                                   cache_dir=dog.cache_dir)
        except (KeyError, DownloadError):
            pass

        if not rinex_obs_file:
            # station position not in the CORS map; fall back to misc IGS stations
            if station_name in extra_station_info:
                station_pos = numpy.array(extra_station_info[station_name])
                rinex_obs_file = download_misc_igs_station(
                    time, station_name, cache_dir=dog.cache_dir)
            else:
                raise DownloadError("no source available for station " + station_name)

    else:
        station_pos = numpy.array(extra_station_info[station_name])
        rinex_obs_file = handlers[network](time,
                                           station_name,
                                           cache_dir=dog.cache_dir)

    obs_data = RINEXFile(rinex_obs_file, rate=30)
    return station_pos, raw.read_rinex_obs(obs_data)
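
For context, station_network_info and extra_station_info are lookup tables defined elsewhere in the surrounding project; a minimal sketch of their assumed shapes (the entry and coordinates below are purely illustrative):

station_network_info = {'suwn': 'Korea'}        # station name -> network label
extra_station_info = {'suwn': (0.0, 0.0, 0.0)}  # station name -> ECEF position (meters)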
Example #7
def populate_data(dog, start_date, duration, stations):
    '''
    Uses laika to retrieve station data for a particular time interval and set of stations.
    :param dog: laika AstroDog object
    :param start_date: python datetime object
    :param duration: python timedelta object
    :param stations: python list of strings representing individual stations
    :return: tuple (station_locs, station_data) of dicts keyed by station name
    '''
    station_locs = {}
    station_data = {}
    for station in stations:
        print(station)
        cache_name = "cached/stationdat_%s_%s_to_%s" % (
            station, start_date.strftime("%Y-%m-%d"),
            (start_date + duration).strftime("%Y-%m-%d"))
        if os.path.exists(cache_name):
            with open(cache_name, "rb") as f:
                station_data[station] = pickle.load(f)
            station_locs[station] = get_station_position(
                station, cache_dir=dog.cache_dir)
            continue

        station_data[station] = {
            'G%02d' % i: defaultdict(empty_factory)
            for i in range(1, 33)
        }
        date = start_date
        while date < start_date + duration:
            try:
                loc, data = data_for_station(dog, station, date)
                station_data[station] = station_transform(
                    data,
                    start_dict=station_data[station],
                    offset=int((date - start_date).total_seconds() / 30))
                station_locs[station] = loc
            except (ValueError, DownloadError):
                print("*** error with station " + station)
            date += timedelta(days=1)
        os.makedirs("cached", exist_ok=True)
        with open(cache_name, "wb") as f:
            pickle.dump(station_data[station], f)
    return station_locs, station_data