Example #1
    def _write_body(self):
        # logger.debug('generating body')
        vi = self.ssp.cur.proc_valid
        for idx in range(np.sum(vi)):
            self.fod.io.write(
                "%8.2f%10.2f%10.2f%10.2f%10.2f\n" %
                (self.ssp.cur.proc.depth[vi][idx],
                 self.ssp.cur.proc.speed[vi][idx],
                 self.ssp.cur.proc.temp[vi][idx],
                 self.ssp.cur.proc.sal[vi][idx],
                 Oc.s2c(s=self.ssp.cur.proc.sal[vi][idx],
                        p=Oc.d2p(d=self.ssp.cur.proc.depth[vi][idx],
                                 lat=self.ssp.cur.meta.latitude),
                        t=self.ssp.cur.proc.temp[vi][idx])))
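
A minimal standalone sketch of the fixed-width line format used above; the values are made up for illustration, only the format string comes from the snippet:

depth, speed, temp, sal, calc_speed = 12.50, 1488.32, 14.21, 33.87, 1488.10
line = "%8.2f%10.2f%10.2f%10.2f%10.2f\n" % (depth, speed, temp, sal, calc_speed)
print(repr(line))  # '   12.50   1488.32     14.21     33.87   1488.10\n'
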
Example #2
    def query(self,
              lat: Optional[float],
              lon: Optional[float],
              datestamp: Union[date, dt, None] = None,
              server_mode: bool = False):
        """Query RTOFS for passed location and timestamp"""
        if datestamp is None:
            datestamp = dt.utcnow()
        if isinstance(datestamp, dt):
            datestamp = datestamp.date()
        if not isinstance(datestamp, date):
            raise RuntimeError("invalid date passed: %s" % type(datestamp))
        logger.debug("query: %s @ (%.6f, %.6f)" % (datestamp, lon, lat))

        # check the inputs
        if (lat is None) or (lon is None) or (datestamp is None):
            logger.error("invalid query: %s @ (%s, %s)" %
                         (datestamp.strftime("%Y%m%d"), lon, lat))
            return None

        try:
            lat_idx, lon_idx = self.grid_coords(lat,
                                                lon,
                                                datestamp=datestamp,
                                                server_mode=server_mode)
            if lat_idx is None:
                logger.info("location outside of GoMOFS coverage")
                return None

        except TypeError as e:
            logger.critical("while converting location to grid coords, %s" % e)
            return None

        logger.debug("idx > lat: %s, lon: %s" % (lat_idx, lon_idx))
        lat_s_idx = lat_idx - self._search_half_window
        if lat_s_idx < 0:
            lat_s_idx = 0
        lat_n_idx = lat_idx + self._search_half_window
        if lat_n_idx >= self._lat.shape[0]:
            lat_n_idx = self._lat.shape[0] - 1
        lon_w_idx = lon_idx - self._search_half_window
        if lon_w_idx < 0:
            lon_w_idx = 0
        lon_e_idx = lon_idx + self._search_half_window
        if lon_e_idx >= self._lon.shape[1]:
            lon_e_idx = self._lon.shape[1] - 1
        # logger.info("indices -> %s %s %s %s" % (lat_s_idx, lat_n_idx, lon_w_idx, lon_e_idx))
        lat_search_window = lat_n_idx - lat_s_idx + 1
        lon_search_window = lon_e_idx - lon_w_idx + 1
        logger.info("updated search window: (%s, %s)" %
                    (lat_search_window, lon_search_window))

        # Need +1 on the north and east indices since it is the "stop" value in these slices
        t = self._file.variables['temp'][self._day_idx, :,
                                         lat_s_idx:lat_n_idx + 1,
                                         lon_w_idx:lon_e_idx + 1]
        s = self._file.variables['salt'][self._day_idx, :,
                                         lat_s_idx:lat_n_idx + 1,
                                         lon_w_idx:lon_e_idx + 1]
        # Set 'unfilled' elements to NaNs (when the whole slice holds valid data, it comes back as a plain numpy.ndarray rather than a MaskedArray)
        if isinstance(t, np.ma.core.MaskedArray):
            t_mask = t.mask
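            # marking the mask as not shared avoids NumPy's shared-mask warning on the in-place fill below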
            t._sharedmask = False
            t[t_mask] = np.nan
        if isinstance(s, np.ma.core.MaskedArray):
            s_mask = s.mask
            s._sharedmask = False
            s[s_mask] = np.nan

        # Calculate distances from requested position to each of the grid node locations
        distances = np.zeros(
            (self._d.size, lat_search_window, lon_search_window))
        longitudes = self._lon[lat_s_idx:lat_n_idx + 1,
                               lon_w_idx:lon_e_idx + 1]
        latitudes = self._lat[lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1]

        for i in range(lat_search_window):

            for j in range(lon_search_window):
                dist = self.g.distance(longitudes[i, j], latitudes[i, j], lon,
                                       lat)
                distances[:, i, j] = dist
                # logger.info("node (%s %s), pos: %3.2f, %3.2f, dist: %3.1f"
                #             % (i, j, latitudes[i, j], longitudes[i, j], distances[0, i, j]))

        # Get mask of "no data" elements and replace these with NaNs in distance array
        t_mask = np.isnan(t)
        distances[t_mask] = np.nan
        s_mask = np.isnan(s)
        distances[s_mask] = np.nan
        # logger.info("distance array:\n%s" % distances[0])

        # Spin through all the depth levels
        temp_pot = np.zeros(self._d.size)
        temp_in_situ = np.zeros(self._d.size)
        d = np.zeros(self._d.size)
        sal = np.zeros(self._d.size)
        num_values = 0
        for i in range(self._d.size):

            t_level = t[i]
            s_level = s[i]
            d_level = distances[i]

            try:
                ind = np.nanargmin(d_level)
            except ValueError:
                # logger.info("%s: all-NaN slices" % i)
                continue

            if np.isnan(ind):
                logger.info("%s: bottom of valid data" % i)
                break

            ind2 = np.unravel_index(ind, t_level.shape)

            t_closest = t_level[ind2]
            s_closest = s_level[ind2]
            # d_closest = d_level[ind2]

            temp_pot[i] = t_closest
            sal[i] = s_closest
            d[i] = self._d[i]

            # Calculate in-situ temperature
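            # (the model temperature is potential temperature, so convert depth to pressure
            #  at this latitude, then derive the in-situ temperature at that pressure)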
            p = Oc.d2p(d[i], lat)
            temp_in_situ[i] = Oc.in_situ_temp(s=sal[i],
                                              t=t_closest,
                                              p=p,
                                              pr=self._ref_p)
            # logger.info("%02d: %6.1f %6.1f > T/S/Dist: %3.1f %3.1f %3.1f [pot.temp. %3.1f]"
            #             % (i, d[i], p, temp_in_situ[i], s_closest, d_closest, t_closest))

            num_values += 1

        if num_values == 0:
            logger.info("no data from lookup!")
            return None

        # ind = np.nanargmin(distances[0])
        # ind2 = np.unravel_index(ind, distances[0].shape)
        # switching to the query location
        # lat_out = latitudes[ind2]
        # lon_out = longitudes[ind2]
        # while lon_out > 180.0:
        #     lon_out -= 360.0

        # Make a new SV object to return our query in
        ssp = Profile()
        ssp.meta.sensor_type = Dicts.sensor_types['Synthetic']
        ssp.meta.probe_type = Dicts.probe_types['GoMOFS']
        ssp.meta.latitude = lat
        if lon > 180.0:  # Go back to negative longitude
            lon -= 360.0
        ssp.meta.longitude = lon
        ssp.meta.utc_time = dt(year=datestamp.year,
                               month=datestamp.month,
                               day=datestamp.day)
        ssp.meta.original_path = "GoMOFS_%s" % datestamp.strftime("%Y%m%d")
        ssp.init_data(num_values)
        ssp.data.depth = d[0:num_values]
        ssp.data.temp = temp_in_situ[0:num_values]
        ssp.data.sal = sal[0:num_values]
        ssp.calc_data_speed()
        ssp.clone_data_to_proc()
        ssp.init_sis()

        profiles = ProfileList()
        profiles.append_profile(ssp)

        return profiles
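
The lookup above picks, at each depth level, the closest grid node that still has valid data: np.nanargmin over the NaN-masked distance grid, then np.unravel_index to recover its 2-D position. A self-contained sketch with made-up values:

import numpy as np

d_level = np.array([[4.2, np.nan, 1.7],
                    [3.1, 2.5, np.nan]])        # distances, with "no data" nodes set to NaN
flat_idx = np.nanargmin(d_level)                # NaNs are skipped; raises ValueError if all-NaN
row, col = np.unravel_index(flat_idx, d_level.shape)
print(int(row), int(col), float(d_level[row, col]))   # 0 2 1.7
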
Example #3
    def query(self, lat: Optional[float], lon: Optional[float], dtstamp: Optional[dt] = None,
              server_mode: bool = False):
        """Query RTOFS for passed location and timestamp"""
        if dtstamp is None:
            dtstamp = dt.utcnow()
        if not isinstance(dtstamp, dt):
            raise RuntimeError("invalid datetime passed: %s" % type(dtstamp))
        logger.debug("query: %s @ (%.6f, %.6f)" % (dtstamp, lon, lat))

        # check the inputs
        if (lat is None) or (lon is None):
            logger.error("invalid query: %s @ (%s, %s)" % (dtstamp.strftime("%Y/%m/%d %H:%M:%S"), lon, lat))
            return None

        try:
            lat_idx, lon_idx = self.grid_coords(lat, lon, dtstamp=dtstamp, server_mode=server_mode)
        except TypeError as e:
            logger.critical("while converting location to grid coords, %s" % e)
            return None
        # logger.debug("idx > lat: %s, lon: %s" % (lat_idx, lon_idx))

        lat_s_idx = lat_idx - self._search_half_window
        lat_n_idx = lat_idx + self._search_half_window
        lon_w_idx = lon_idx - self._search_half_window
        lon_e_idx = lon_idx + self._search_half_window
        # logger.info("indices -> %s %s %s %s" % (lat_s_idx, lat_n_idx, lon_w_idx, lon_e_idx))
        if lon < self._lon_0:  # Make all longitudes safe
            lon += 360.0

        longitudes = np.zeros((self._search_window, self._search_window))
        if (lon_e_idx < self._lon.size) and (lon_w_idx >= 0):
            # logger.info("safe case")

            # Need +1 on the north and east indices since it is the "stop" value in these slices
            t = self._file_temp.variables['temperature'][
                self._day_idx, :, lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1]
            s = self._file_sal.variables['salinity'][
                self._day_idx, :, lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1]
            # Set 'unfilled' elements to NaNs (when the whole slice holds valid data, it comes back as a plain numpy.ndarray rather than a MaskedArray)
            if isinstance(t, np.ma.core.MaskedArray):
                t_mask = t.mask
                t._sharedmask = False
                t[t_mask] = np.nan
            if isinstance(s, np.ma.core.MaskedArray):
                s_mask = s.mask
                s._sharedmask = False
                s[s_mask] = np.nan

            lons = self._lon[lon_w_idx:lon_e_idx + 1]
            for i in range(self._search_window):
                longitudes[i, :] = lons
        else:
            logger.info("split case")

            # --- Do the left portion of the array first; it runs up to the wrap longitude
            lon_e_idx = self._lon.size - 1
            # lon_w_idx can be negative when lon_idx sits at the westernmost end of the array
            if lon_w_idx < 0:
                lon_w_idx = lon_w_idx + self._lon.size
            # logger.info("using lon west/east indices -> %s %s" % (lon_w_idx, lon_e_idx))

            # Need +1 on the north and east indices since it is the "stop" value in these slices
            t_left = self._file_temp.variables['temperature'][
                self._day_idx, :, lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1]
            s_left = self._file_sal.variables['salinity'][
                self._day_idx, :, lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1]
            # Set 'unfilled' elements to NaNs (when the whole slice holds valid data, it comes back as a plain numpy.ndarray rather than a MaskedArray)
            if isinstance(t_left, np.ma.core.MaskedArray):
                t_mask = t_left.mask
                t_left[t_mask] = np.nan
            if isinstance(s_left, np.ma.core.MaskedArray):
                s_mask = s_left.mask
                s_left[s_mask] = np.nan

            lons_left = self._lon[lon_w_idx:lon_e_idx + 1]
            for i in range(self._search_window):
                longitudes[i, 0:lons_left.size] = lons_left
            # logger.info("longitudes are now: %s" % longitudes)

            # --- Then do the right portion of the array, starting at the wrap longitude
            # and limited to fill the remainder of the search window
            lon_w_idx = 0
            lon_e_idx = self._search_window - lons_left.size - 1

            # Need +1 on the north and east indices since it is the "stop" value in these slices
            t_right = self._file_temp.variables['temperature'][
                self._day_idx, :, lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1]
            s_right = self._file_sal.variables['salinity'][
                self._day_idx, :, lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1]
            # Set 'unfilled' elements to NaNs (when the whole slice holds valid data, it comes back as a plain numpy.ndarray rather than a MaskedArray)
            if isinstance(t_right, np.ma.core.MaskedArray):
                t_mask = t_right.mask
                t_right[t_mask] = np.nan
            if isinstance(s_right, np.ma.core.MaskedArray):
                s_mask = s_right.mask
                s_right[s_mask] = np.nan

            lons_right = self._lon[lon_w_idx:lon_e_idx + 1]
            for i in range(self._search_window):
                longitudes[i, lons_left.size:self._search_window] = lons_right

            # merge data
            t = np.zeros((self._file_temp.variables['lev'].size, self._search_window, self._search_window))
            t[:, :, 0:lons_left.size] = t_left
            t[:, :, lons_left.size:self._search_window] = t_right
            s = np.zeros((self._file_temp.variables['lev'].size, self._search_window, self._search_window))
            s[:, :, 0:lons_left.size] = s_left
            s[:, :, lons_left.size:self._search_window] = s_right

        # Calculate distances from requested position to each of the grid node locations
        distances = np.zeros((self._d.size, self._search_window, self._search_window))
        latitudes = np.zeros((self._search_window, self._search_window))
        lats = self._lat[lat_s_idx:lat_n_idx + 1]
        for i in range(self._search_window):
            latitudes[:, i] = lats

        for i in range(self._search_window):

            for j in range(self._search_window):
                dist = self.g.distance(longitudes[i, j], latitudes[i, j], lon, lat)
                distances[:, i, j] = dist
                # logger.info("node %s, pos: %3.1f, %3.1f, dist: %3.1f"
                #             % (i, latitudes[i, j], longitudes[i, j], distances[0, i, j]))
        # logger.info("distance array:\n%s" % distances[0])
        # Get mask of "no data" elements and replace these with NaNs in distance array
        t_mask = np.isnan(t)
        distances[t_mask] = np.nan
        s_mask = np.isnan(s)
        distances[s_mask] = np.nan

        # Spin through all the depth levels
        temp_pot = np.zeros(self._d.size)
        temp_in_situ = np.zeros(self._d.size)
        d = np.zeros(self._d.size)
        sal = np.zeros(self._d.size)
        num_values = 0
        for i in range(self._d.size):

            t_level = t[i]
            s_level = s[i]
            d_level = distances[i]

            try:
                ind = np.nanargmin(d_level)
            except ValueError:
                # logger.info("%s: all-NaN slices" % i)
                continue

            if np.isnan(ind):
                logger.info("%s: bottom of valid data" % i)
                break

            ind2 = np.unravel_index(ind, t_level.shape)

            t_closest = t_level[ind2]
            s_closest = s_level[ind2]
            # d_closest = d_level[ind2]

            temp_pot[i] = t_closest
            sal[i] = s_closest
            d[i] = self._d[i]

            # Calculate in-situ temperature
            p = Oc.d2p(d[i], lat)
            temp_in_situ[i] = Oc.in_situ_temp(s=sal[i], t=t_closest, p=p, pr=self._ref_p)
            # logger.info("%02d: %6.1f %6.1f > T/S/Dist: %3.1f %3.1f %3.1f [pot.temp. %3.1f]"
            #             % (i, d[i], p, temp_in_situ[i], s_closest, d_closest, t_closest))

            num_values += 1

        if num_values == 0:
            logger.info("no data from lookup!")
            return None

        # ind = np.nanargmin(distances[0])
        # ind2 = np.unravel_index(ind, distances[0].shape)
        # switching to the query location
        # lat_out = latitudes[ind2]
        # lon_out = longitudes[ind2]
        # while lon_out > 180.0:
        #     lon_out -= 360.0

        # Make a new SV object to return our query in
        ssp = Profile()
        ssp.meta.sensor_type = Dicts.sensor_types['Synthetic']
        ssp.meta.probe_type = Dicts.probe_types['RTOFS']
        ssp.meta.latitude = lat
        if lon > 180.0:  # Go back to negative longitude
            lon -= 360.0
        ssp.meta.longitude = lon
        ssp.meta.utc_time = dt(year=dtstamp.year, month=dtstamp.month, day=dtstamp.day,
                               hour=dtstamp.hour, minute=dtstamp.minute, second=dtstamp.second)
        ssp.meta.original_path = "RTOFS_%s" % dtstamp.strftime("%Y%m%d_%H%M%S")
        ssp.init_data(num_values)
        ssp.data.depth = d[0:num_values]
        ssp.data.temp = temp_in_situ[0:num_values]
        ssp.data.sal = sal[0:num_values]
        ssp.calc_data_speed()
        ssp.clone_data_to_proc()
        ssp.init_sis()

        profiles = ProfileList()
        profiles.append_profile(ssp)

        return profiles
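
An illustrative sketch of the split-window idea in the RTOFS example: when the requested window straddles the grid's wrap longitude, the tail of the longitude axis and its head are stitched together. The axis and indices below are toy values, not taken from the original module:

import numpy as np

lon = np.arange(0.0, 360.0, 1.0)   # toy global longitude axis
half = 2
lon_idx = 1                        # query near the western edge of the array
w_idx = lon_idx - half             # negative, so it wraps to the end of the axis
e_idx = lon_idx + half
if w_idx < 0:
    window = np.concatenate([lon[w_idx + lon.size:], lon[:e_idx + 1]])
else:
    window = lon[w_idx:e_idx + 1]
print(window)                      # [359.   0.   1.   2.   3.]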