Example #1
    def query(self, lat: float, lon: float, datestamp: Union[date, dt, None] = None, server_mode: bool = False):
        """Query WOA13 for passed location and timestamp"""
        if datestamp is None:
            datestamp = dt.utcnow()
        if isinstance(datestamp, dt):
            datestamp = datestamp.date()
        if not isinstance(datestamp, date):
            raise RuntimeError("invalid date passed: %s" % type(datestamp))
        logger.debug("query: %s @ (%.6f, %.6f)" % (datestamp, lon, lat))

        # check the inputs
        if (lat is None) or (lon is None) or (datestamp is None):
            logger.error("invalid query: %s @ (%s, %s)" % (datestamp.strftime("%Y%m%d"), lon, lat))
            return None

        if not self.has_data_loaded:
            if not self.load_grids():
                logger.error("No data")
                return None

        self.calc_indices(month=datestamp.month)
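        # (calc_indices() is expected to set self.month_idx and self.season_idx,
        # which are used below to pick the monthly and seasonal WOA13 grids)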

        # Find the nearest grid node
        lat_base_idx, lon_base_idx = self.grid_coords(lat=lat, lon=lon)
        lat_offsets = range(lat_base_idx - self.search_radius, lat_base_idx + self.search_radius + 1)
        lon_offsets = range(lon_base_idx - self.search_radius, lon_base_idx + self.search_radius + 1)

        # Search nodes surrounding the requested position to find the closest non-land
        t = np.zeros(self.num_levels)
        s = np.zeros(self.num_levels)
        t_min = np.zeros(self.num_levels)
        s_min = np.zeros(self.num_levels)
        t_max = np.zeros(self.num_levels)
        s_max = np.zeros(self.num_levels)
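        # Per-depth-level distance of the closest valid node found so far (the large sentinel means "not found yet")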
        dist_arr = np.zeros(self.num_levels)
        dist_t_sd = np.zeros(self.num_levels)
        dist_s_sd = np.zeros(self.num_levels)
        dist_arr[:] = 99999999
        dist_t_sd[:] = 99999999
        dist_s_sd[:] = 99999999
        min_dist = 999999999
        num_visited = 0
        # These keep track of the closest node found; they are also
        # used to populate the lat/lon of the cast to be delivered
        lat_idx = -1
        lon_idx = -1
        for this_lat_index in lat_offsets:
            for this_lon_index in lon_offsets:

                if this_lon_index >= self.lon.size:
                    # logger.debug("[%s] >= [%s]" % (this_lon_index, self.t[self.month_idx].variables['lon'].size))
                    this_lon_index -= self.lon.size

                # Check to see if we're at sea or on land
                if self.landsea[this_lat_index][this_lon_index] == 1:
                    # logger.debug("at land: %s, %s" % (this_lat_index, this_lon_index))
                    # from matplotlib import pyplot
                    # pyplot.imshow(self.landsea, origin='lower')
                    # pyplot.scatter([this_lon_index], [this_lat_index], c='r', s=40)
                    # pyplot.show()

                    continue

                # calculate the distance to the grid node
                dist = self.g.distance(lon, lat, self.lon[this_lon_index], self.lat[this_lat_index])
                # logger.debug("[%s %s] [%s %s]" % (self.lon.shape, self.lat.shape, this_lon_index, this_lat_index))
                # logger.debug("dist: %s" % dist)

                # Keep track of the closest valid grid node to report the pseudo-cast position
                if dist < min_dist:
                    min_dist = dist
                    lat_idx = this_lat_index
                    lon_idx = this_lon_index

                # Extract monthly temperature and salinity profile for this location
                t_profile = self.t[self.month_idx].variables['t_an'][0, :, this_lat_index, this_lon_index]
                s_profile = self.s[self.month_idx].variables['s_an'][0, :, this_lat_index, this_lon_index]
                # Extract seasonal temperature and salinity profile for this location
                t_profile2 = self.t[self.season_idx].variables['t_an'][0, :, this_lat_index, this_lon_index]
                s_profile2 = self.s[self.season_idx].variables['s_an'][0, :, this_lat_index, this_lon_index]

                # Now do the same for the standard deviation profiles
                t_sd_profile = self.t[self.month_idx].variables['t_sd'][0, :, this_lat_index, this_lon_index]
                s_sd_profile = self.s[self.month_idx].variables['s_sd'][0, :, this_lat_index, this_lon_index]
                t_sd_profile2 = self.t[self.season_idx].variables['t_sd'][0, :, this_lat_index, this_lon_index]
                s_sd_profile2 = self.s[self.season_idx].variables['s_sd'][0, :, this_lat_index, this_lon_index]

                # Overwrite the top of the seasonal profiles with the monthly profiles
                t_profile2[0:t_profile.size] = t_profile
                s_profile2[0:s_profile.size] = s_profile
                t_sd_profile2[0:t_sd_profile.size] = t_sd_profile
                s_sd_profile2[0:s_sd_profile.size] = s_sd_profile

                # For each depth level, keep this node's value only if it is closer than the
                # values found in previous iterations (i.e., maintain the closest value at each depth level)
                # logger.debug("profile sz: %d\n%s" % (t_profile2.size, t_profile2))
                for i in range(t_profile2.size):
                    if dist >= dist_arr[i]:
                        continue
                    if (t_profile2[i] < 50.0) and (s_profile2[i] < 500.0) and (s_profile2[i] >= 0):
                        t[i] = t_profile2[i]
                        s[i] = s_profile2[i]
                        dist_arr[i] = dist

                # Now do the same thing for the temperature standard deviations
                for i in range(t_sd_profile2.size):
                    if dist >= dist_t_sd[i]:
                        continue
                    if (t_sd_profile2[i] < 50.0) and (t_sd_profile2[i] > -2):
                        t_min[i] = t_profile2[i] - t_sd_profile2[i]
                        if t_min[i] < -2.0:  # can't have overly cold water
                            t_min[i] = -2.0
                        t_max[i] = t_profile2[i] + t_sd_profile2[i]
                        dist_t_sd[i] = dist

                # Now do the same thing for the salinity standard deviations
                for i in range(s_sd_profile2.size):
                    if dist >= dist_s_sd[i]:
                        continue
                    if (s_sd_profile2[i] < 500.0) and (s_sd_profile2[i] >= 0):
                        s_min[i] = s_profile2[i] - s_sd_profile2[i]
                        if s_min[i] < 0:  # Can't have a negative salinity
                            s_min[i] = 0
                        s_max[i] = s_profile2[i] + s_sd_profile2[i]
                        dist_s_sd[i] = dist

                num_visited += 1

        if (lat_idx == -1) and (lon_idx == -1):
            logger.info("possible request on land")
            return None

        valid = dist_arr != 99999999
        num_values = t[valid].size
        logger.debug("valid: %s" % num_values)

        if lon > 180.0:  # Go back to negative longitude
            lon -= 360.0

        # populate output profiles
        ssp = Profile()
        ssp.meta.sensor_type = Dicts.sensor_types['Synthetic']
        ssp.meta.probe_type = Dicts.probe_types['WOA13']
        ssp.meta.latitude = lat
        ssp.meta.longitude = lon
        ssp.meta.utc_time = dt(year=datestamp.year, month=datestamp.month, day=datestamp.day)
        ssp.meta.original_path = "WOA13_%s" % datestamp.strftime("%Y%m%d")
        ssp.init_data(num_values)
        ssp.data.depth = self.t[self.season_idx].variables['depth'][0:num_values]
        ssp.data.temp = t[valid]
        ssp.data.sal = s[valid]
        ssp.calc_data_speed()
        ssp.clone_data_to_proc()
        ssp.init_sis()

        # - min/max
        # Isolate realistic values
        for i in range(t_min.size):

            if dist_t_sd[i] == 99999999 or dist_s_sd[i] == 99999999:
                num_values = i
                break

        # -- min
        ssp_min = Profile()
        ssp_min.meta.sensor_type = Dicts.sensor_types['Synthetic']
        ssp_min.meta.probe_type = Dicts.probe_types['WOA13']
        ssp_min.meta.latitude = lat
        ssp_min.meta.longitude = lon
        ssp_min.meta.utc_time = dt(year=datestamp.year, month=datestamp.month, day=datestamp.day)
        if num_values > 0:
            ssp_min.init_data(num_values)
            ssp_min.data.depth = self.t[self.season_idx].variables['depth'][0:num_values]
            ssp_min.data.temp = t_min[valid][0:num_values]
            ssp_min.data.sal = s_min[valid][0:num_values]
            ssp_min.calc_data_speed()
            ssp_min.clone_data_to_proc()
            ssp_min.init_sis()
        else:
            ssp_min = None
        # -- max
        ssp_max = Profile()
        ssp_max.meta.sensor_type = Dicts.sensor_types['Synthetic']
        ssp_max.meta.probe_type = Dicts.probe_types['WOA13']
        ssp_max.meta.latitude = lat
        ssp_max.meta.longitude = lon
        ssp_max.meta.utc_time = dt(year=datestamp.year, month=datestamp.month, day=datestamp.day)
        if num_values > 0:
            ssp_max.init_data(num_values)
            ssp_max.data.depth = self.t[self.season_idx].variables['depth'][0:num_values].astype(np.float64)
            ssp_max.data.temp = t_max[valid][0:num_values]
            ssp_max.data.sal = s_max[valid][0:num_values]
            ssp_max.calc_data_speed()
            ssp_max.clone_data_to_proc()
            ssp_max.init_sis()
        else:
            ssp_max = None

        profiles = ProfileList()
        profiles.append_profile(ssp)
        if ssp_min:
            profiles.append_profile(ssp_min)
        if ssp_max:
            profiles.append_profile(ssp_max)
        profiles.current_index = 0

        # logger.debug("retrieved: %s" % profiles)

        return profiles
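
A minimal call sketch for the WOA13 query above. The atlas object woa13 is hypothetical (its construction, data folders, and the projection helper self.g live elsewhere in the hosting library); only the query() signature shown here is used.

from datetime import datetime as dt

# 'woa13' is a hypothetical, already-initialized atlas instance exposing the query() above
profiles = woa13.query(lat=43.05, lon=-70.32, datestamp=dt.utcnow())
if profiles is None:
    print("no cast: on land, invalid inputs, or grids not loaded")
else:
    # the list holds the mean cast first, then (when std-dev data are valid) the min and max casts
    print("WOA13 cast(s) retrieved")
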
Example #2
    def query(self, nc_path: str, lat: float,
              lon: float) -> Optional[ProfileList]:
        if not os.path.exists(nc_path):
            raise RuntimeError('Unable to locate %s' % nc_path)
        logger.debug('nc path: %s' % nc_path)

        if (lat is None) or (lon is None):
            logger.error("invalid location query: (%s, %s)" % (lon, lat))
            return None
        logger.debug('query location: %s, %s' % (lat, lon))

        progress = CliProgress()

        try:
            self._file = Dataset(nc_path)
            progress.update(20)

        except (RuntimeError, IOError) as e:
            logger.warning("unable to access data: %s" % e)
            self.clear_data()
            progress.end()
            return None

        try:
            self.name = self._file.title

            time = self._file.variables['time']
            self._timestamp = num2date(time[0], units=time.units)
            logger.debug("Retrieved time: %s" % self._timestamp.isoformat())

            # Now get latitudes, longitudes and depths for x,y,z referencing
            self._lats = self._file.variables['lat'][:]
            self._lons = self._file.variables['lon'][:]
            # logger.debug('lat:(%s)\n%s' % (self._lats.shape, self._lats))
            # logger.debug('lon:(%s)\n%s' % (self._lons.shape, self._lons))

            self._zeta = self._file.variables['zeta'][0, :]
            self._siglay = self._file.variables['siglay'][:]
            self._h = self._file.variables['h'][:]
            # logger.debug('zeta:(%s)\n%s' % (self._zeta.shape, self._zeta))
            # logger.debug('siglay:(%s)\n%s' % (self._siglay.shape, self._siglay[:, 0]))
            # logger.debug('h:(%s)\n%s' % (self._h.shape, self._h))

            self._temp = self._file.variables['temp'][:]
            self._sal = self._file.variables['salinity'][:]
            # logger.debug('temp:(%s)\n%s' % (self._temp.shape, self._temp[:, 0]))
            # logger.debug('sal:(%s)\n%s' % (self._sal.shape, self._sal[:, 0]))

        except Exception as e:
            logger.error(
                "trouble looking up variables for the lat/lon grid and/or depth: %s"
                % e)
            self.clear_data()
            progress.end()
            return None

        min_dist = 100000.0
        min_idx = None
        for idx, _ in enumerate(self._lats):
            nc_lat = self._lats[idx]
            nc_lon = self._lons[idx]
            if nc_lon > 180.0:
                nc_lon = nc_lon - 360.0
            nc_dist = self.g.distance(nc_lon, nc_lat, lon, lat)
            # logger.debug('loc: %.6f, %.6f -> %.6f' % (nc_lat, nc_lon, nc_dist))
            if nc_dist < min_dist:
                min_dist = nc_dist
                min_idx = idx
        if min_dist >= 10000.0:
            logger.error("location too far from model nodes: %.f" % min_dist)
            self.clear_data()
            progress.end()
            return None

        self._loc_idx = min_idx
        self._lon = self._lons[self._loc_idx]
        if self._lon > 180.0:
            self._lon = self._lon - 360.0
        self._lat = self._lats[self._loc_idx]
        logger.debug('closest node: %d [%s, %s] -> %s' %
                     (self._loc_idx, self._lat, self._lon, min_dist))

        zeta = self._zeta[self._loc_idx]
        h = self._h[self._loc_idx]
        siglay = -self._siglay[:, self._loc_idx]
        # logger.debug('zeta: %s, h: %s, siglay: %s' % (zeta, h, siglay))
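        # Depths of the sigma layers: layer fraction of the total water column (bathymetry h plus surface elevation zeta)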
        self._d = siglay * (h + zeta)
        # logger.debug('d:(%s)\n%s' % (self._h.shape, self._d))

        # Make a new SV object to return our query in
        ssp = Profile()
        ssp.meta.sensor_type = Dicts.sensor_types['Synthetic']
        ssp.meta.probe_type = Dicts.probe_types[self.name]
        ssp.meta.latitude = self._lat
        ssp.meta.longitude = self._lon
        ssp.meta.utc_time = dt(year=self._timestamp.year,
                               month=self._timestamp.month,
                               day=self._timestamp.day,
                               hour=self._timestamp.hour,
                               minute=self._timestamp.minute,
                               second=self._timestamp.second)
        ssp.meta.original_path = "%s_%s" % (
            self.name, self._timestamp.strftime("%Y%m%d_%H%M%S"))
        ssp.init_data(self._d.shape[0])
        ssp.data.depth = self._d[:]
        ssp.data.temp = self._temp[0, :, self._loc_idx]
        ssp.data.sal = self._sal[0, :, self._loc_idx]
        ssp.calc_data_speed()
        ssp.clone_data_to_proc()
        ssp.init_sis()

        profiles = ProfileList()
        profiles.append_profile(ssp)

        progress.end()
        return profiles
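
A minimal call sketch, assuming a hypothetical reader instance model that exposes the query() above and has already been initialized; the NetCDF path is a placeholder.

# 'model' is a hypothetical, already-initialized reader exposing the query() above
profiles = model.query(nc_path="./forecast_0001.nc", lat=43.05, lon=-70.32)
if profiles is None:
    print("no cast: missing variables, bad location, or node too far from the model grid")
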
Example #3
    def query(self,
              lat: Optional[float],
              lon: Optional[float],
              datestamp: Union[date, dt, None] = None,
              server_mode: bool = False):
        """Query RTOFS for passed location and timestamp"""
        if datestamp is None:
            datestamp = dt.utcnow()
        if isinstance(datestamp, dt):
            datestamp = datestamp.date()
        if not isinstance(datestamp, date):
            raise RuntimeError("invalid date passed: %s" % type(datestamp))
        logger.debug("query: %s @ (%.6f, %.6f)" % (datestamp, lon, lat))

        # check the inputs
        if (lat is None) or (lon is None) or (datestamp is None):
            logger.error("invalid query: %s @ (%s, %s)" %
                         (datestamp.strftime("%Y%m%d"), lon, lat))
            return None

        try:
            lat_idx, lon_idx = self.grid_coords(lat,
                                                lon,
                                                datestamp=datestamp,
                                                server_mode=server_mode)
            if lat_idx is None:
                logger.info("location outside of GoMOFS coverage")
                return None

        except TypeError as e:
            logger.critical("while converting location to grid coords, %s" % e)
            return None

        logger.debug("idx > lat: %s, lon: %s" % (lat_idx, lon_idx))
        lat_s_idx = lat_idx - self._search_half_window
        if lat_s_idx < 0:
            lat_s_idx = 0
        lat_n_idx = lat_idx + self._search_half_window
        if lat_n_idx >= self._lat.shape[0]:
            lat_n_idx = self._lat.shape[0] - 1
        lon_w_idx = lon_idx - self._search_half_window
        if lon_w_idx < 0:
            lon_w_idx = 0
        lon_e_idx = lon_idx + self._search_half_window
        if lon_e_idx >= self._lon.shape[1]:
            lon_e_idx = self._lon.shape[1] - 1
        # logger.info("indices -> %s %s %s %s" % (lat_s_idx, lat_n_idx, lon_w_idx, lon_e_idx))
        lat_search_window = lat_n_idx - lat_s_idx + 1
        lon_search_window = lon_e_idx - lon_w_idx + 1
        logger.info("updated search window: (%s, %s)" %
                    (lat_search_window, lon_search_window))

        # Need +1 on the north and east indices since they are the "stop" values in these slices
        t = self._file.variables['temp'][self._day_idx, :,
                                         lat_s_idx:lat_n_idx + 1,
                                         lon_w_idx:lon_e_idx + 1]
        s = self._file.variables['salt'][self._day_idx, :,
                                         lat_s_idx:lat_n_idx + 1,
                                         lon_w_idx:lon_e_idx + 1]
        # Set 'unfilled' (masked) elements to NaN (when the whole slice holds valid data,
        # the read returns a plain numpy.ndarray, hence the isinstance checks)
        if isinstance(t, np.ma.core.MaskedArray):
            t_mask = t.mask
            t._sharedmask = False
            t[t_mask] = np.nan
        if isinstance(s, np.ma.core.MaskedArray):
            s_mask = s.mask
            s._sharedmask = False
            s[s_mask] = np.nan

        # Calculate distances from requested position to each of the grid node locations
        distances = np.zeros(
            (self._d.size, lat_search_window, lon_search_window))
        longitudes = self._lon[lat_s_idx:lat_n_idx + 1,
                               lon_w_idx:lon_e_idx + 1]
        latitudes = self._lat[lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1]

        for i in range(lat_search_window):

            for j in range(lon_search_window):
                dist = self.g.distance(longitudes[i, j], latitudes[i, j], lon,
                                       lat)
                distances[:, i, j] = dist
                # logger.info("node (%s %s), pos: %3.2f, %3.2f, dist: %3.1f"
                #             % (i, j, latitudes[i, j], longitudes[i, j], distances[0, i, j]))

        # Get mask of "no data" elements and replace these with NaNs in distance array
        t_mask = np.isnan(t)
        distances[t_mask] = np.nan
        s_mask = np.isnan(s)
        distances[s_mask] = np.nan
        # logger.info("distance array:\n%s" % distances[0])

        # Spin through all the depth levels
        temp_pot = np.zeros(self._d.size)
        temp_in_situ = np.zeros(self._d.size)
        d = np.zeros(self._d.size)
        sal = np.zeros(self._d.size)
        num_values = 0
        for i in range(self._d.size):

            t_level = t[i]
            s_level = s[i]
            d_level = distances[i]

            try:
                ind = np.nanargmin(d_level)
            except ValueError:
                # logger.info("%s: all-NaN slices" % i)
                continue

            if np.isnan(ind):
                logger.info("%s: bottom of valid data" % i)
                break

            ind2 = np.unravel_index(ind, t_level.shape)

            t_closest = t_level[ind2]
            s_closest = s_level[ind2]
            # d_closest = d_level[ind2]

            temp_pot[i] = t_closest
            sal[i] = s_closest
            d[i] = self._d[i]

            # Calculate in-situ temperature
            p = Oc.d2p(d[i], lat)
            temp_in_situ[i] = Oc.in_situ_temp(s=sal[i],
                                              t=t_closest,
                                              p=p,
                                              pr=self._ref_p)
            # logger.info("%02d: %6.1f %6.1f > T/S/Dist: %3.1f %3.1f %3.1f [pot.temp. %3.1f]"
            #             % (i, d[i], p, temp_in_situ[i], s_closest, d_closest, t_closest))

            num_values += 1

        if num_values == 0:
            logger.info("no data from lookup!")
            return None

        # ind = np.nanargmin(distances[0])
        # ind2 = np.unravel_index(ind, distances[0].shape)
        # switching to the query location
        # lat_out = latitudes[ind2]
        # lon_out = longitudes[ind2]
        # while lon_out > 180.0:
        #     lon_out -= 360.0

        # Make a new SV object to return our query in
        ssp = Profile()
        ssp.meta.sensor_type = Dicts.sensor_types['Synthetic']
        ssp.meta.probe_type = Dicts.probe_types['GoMOFS']
        ssp.meta.latitude = lat
        if lon > 180.0:  # Go back to negative longitude
            lon -= 360.0
        ssp.meta.longitude = lon
        ssp.meta.utc_time = dt(year=datestamp.year,
                               month=datestamp.month,
                               day=datestamp.day)
        ssp.meta.original_path = "GoMOFS_%s" % datestamp.strftime("%Y%m%d")
        ssp.init_data(num_values)
        ssp.data.depth = d[0:num_values]
        ssp.data.temp = temp_in_situ[0:num_values]
        ssp.data.sal = sal[0:num_values]
        ssp.calc_data_speed()
        ssp.clone_data_to_proc()
        ssp.init_sis()

        profiles = ProfileList()
        profiles.append_profile(ssp)

        return profiles
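
A minimal call sketch, assuming a hypothetical gomofs atlas instance; server_mode is simply forwarded to grid_coords() as in the code above.

from datetime import date

# 'gomofs' is a hypothetical, already-initialized atlas instance exposing the query() above
profiles = gomofs.query(lat=43.05, lon=-70.32, datestamp=date(2019, 3, 14), server_mode=False)
if profiles is None:
    print("no cast: outside GoMOFS coverage or no valid data near the location")
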
Example #4
    def query(self, lat: Optional[float], lon: Optional[float], dtstamp: Optional[dt] = None,
              server_mode: bool = False):
        """Query RTOFS for passed location and timestamp"""
        if dtstamp is None:
            dtstamp = dt.utcnow()
        if not isinstance(dtstamp, dt):
            raise RuntimeError("invalid datetime passed: %s" % type(dtstamp))
        logger.debug("query: %s @ (%.6f, %.6f)" % (dtstamp, lon, lat))

        # check the inputs
        if (lat is None) or (lon is None):
            logger.error("invalid query: %s @ (%s, %s)" % (dtstamp.strftime("%Y/%m/%d %H:%M:%S"), lon, lat))
            return None

        try:
            lat_idx, lon_idx = self.grid_coords(lat, lon, dtstamp=dtstamp, server_mode=server_mode)
        except TypeError as e:
            logger.critical("while converting location to grid coords, %s" % e)
            return None
        # logger.debug("idx > lat: %s, lon: %s" % (lat_idx, lon_idx))

        lat_s_idx = lat_idx - self._search_half_window
        lat_n_idx = lat_idx + self._search_half_window
        lon_w_idx = lon_idx - self._search_half_window
        lon_e_idx = lon_idx + self._search_half_window
        # logger.info("indices -> %s %s %s %s" % (lat_s_idx, lat_n_idx, lon_w_idx, lon_e_idx))
        if lon < self._lon_0:  # Make all longitudes safe
            lon += 360.0

        longitudes = np.zeros((self._search_window, self._search_window))
        if (lon_e_idx < self._lon.size) and (lon_w_idx >= 0):
            # logger.info("safe case")

            # Need +1 on the north and east indices since they are the "stop" values in these slices
            t = self._file_temp.variables['temperature'][self._day_idx, :, lat_s_idx:lat_n_idx + 1,
                lon_w_idx:lon_e_idx + 1]
            s = self._file_sal.variables['salinity'][self._day_idx, :, lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1]
            # Set 'unfilled' (masked) elements to NaN (when the whole slice holds valid data,
            # the read returns a plain numpy.ndarray, hence the isinstance checks)
            if isinstance(t, np.ma.core.MaskedArray):
                t_mask = t.mask
                t._sharedmask = False
                t[t_mask] = np.nan
            if isinstance(s, np.ma.core.MaskedArray):
                s_mask = s.mask
                s._sharedmask = False
                s[s_mask] = np.nan

            lons = self._lon[lon_w_idx:lon_e_idx + 1]
            for i in range(self._search_window):
                longitudes[i, :] = lons
        else:
            logger.info("split case")

            # --- Do the left portion of the array first; it runs up to the wrap longitude
            lon_e_idx = self._lon.size - 1
            # lon_w_idx can be negative if lon_idx is near the westernmost end of the array
            if lon_w_idx < 0:
                lon_w_idx = lon_w_idx + self._lon.size
            # logger.info("using lon west/east indices -> %s %s" % (lon_w_idx, lon_e_idx))

            # Need +1 on the north and east indices since they are the "stop" values in these slices
            t_left = self._file_temp.variables['temperature'][self._day_idx, :, lat_s_idx:lat_n_idx + 1,
                     lon_w_idx:lon_e_idx + 1]
            s_left = self._file_sal.variables['salinity'][self._day_idx, :, lat_s_idx:lat_n_idx + 1,
                     lon_w_idx:lon_e_idx + 1]
            # Set 'unfilled' (masked) elements to NaN (when the whole slice holds valid data,
            # the read returns a plain numpy.ndarray, hence the isinstance checks)
            if isinstance(t_left, np.ma.core.MaskedArray):
                t_mask = t_left.mask
                t_left[t_mask] = np.nan
            if isinstance(s_left, np.ma.core.MaskedArray):
                s_mask = s_left.mask
                s_left[s_mask] = np.nan

            lons_left = self._lon[lon_w_idx:lon_e_idx + 1]
            for i in range(self._search_window):
                longitudes[i, 0:lons_left.size] = lons_left
            # logger.info("longitudes are now: %s" % longitudes)

            # --- Now do the right portion of the array, which starts just past the wrap
            # longitude; limit its width to the remainder of the search window
            lon_w_idx = 0
            lon_e_idx = self._search_window - lons_left.size - 1

            # Need +1 on the north and east indices since they are the "stop" values in these slices
            t_right = self._file_temp.variables['temperature'][self._day_idx, :, lat_s_idx:lat_n_idx + 1,
                      lon_w_idx:lon_e_idx + 1]
            s_right = self._file_sal.variables['salinity'][self._day_idx, :, lat_s_idx:lat_n_idx + 1,
                      lon_w_idx:lon_e_idx + 1]
            # Set 'unfilled' (masked) elements to NaN (when the whole slice holds valid data,
            # the read returns a plain numpy.ndarray, hence the isinstance checks)
            if isinstance(t_right, np.ma.core.MaskedArray):
                t_mask = t_right.mask
                t_right[t_mask] = np.nan
            if isinstance(s_right, np.ma.core.MaskedArray):
                s_mask = s_right.mask
                s_right[s_mask] = np.nan

            lons_right = self._lon[lon_w_idx:lon_e_idx + 1]
            for i in range(self._search_window):
                longitudes[i, lons_left.size:self._search_window] = lons_right

            # merge data
            t = np.zeros((self._file_temp.variables['lev'].size, self._search_window, self._search_window))
            t[:, :, 0:lons_left.size] = t_left
            t[:, :, lons_left.size:self._search_window] = t_right
            s = np.zeros((self._file_temp.variables['lev'].size, self._search_window, self._search_window))
            s[:, :, 0:lons_left.size] = s_left
            s[:, :, lons_left.size:self._search_window] = s_right

        # Calculate distances from requested position to each of the grid node locations
        distances = np.zeros((self._d.size, self._search_window, self._search_window))
        latitudes = np.zeros((self._search_window, self._search_window))
        lats = self._lat[lat_s_idx:lat_n_idx + 1]
        for i in range(self._search_window):
            latitudes[:, i] = lats

        for i in range(self._search_window):

            for j in range(self._search_window):
                dist = self.g.distance(longitudes[i, j], latitudes[i, j], lon, lat)
                distances[:, i, j] = dist
                # logger.info("node %s, pos: %3.1f, %3.1f, dist: %3.1f"
                #             % (i, latitudes[i, j], longitudes[i, j], distances[0, i, j]))
        # logger.info("distance array:\n%s" % distances[0])
        # Get mask of "no data" elements and replace these with NaNs in distance array
        t_mask = np.isnan(t)
        distances[t_mask] = np.nan
        s_mask = np.isnan(s)
        distances[s_mask] = np.nan

        # Spin through all the depth levels
        temp_pot = np.zeros(self._d.size)
        temp_in_situ = np.zeros(self._d.size)
        d = np.zeros(self._d.size)
        sal = np.zeros(self._d.size)
        num_values = 0
        for i in range(self._d.size):

            t_level = t[i]
            s_level = s[i]
            d_level = distances[i]

            try:
                ind = np.nanargmin(d_level)
            except ValueError:
                # logger.info("%s: all-NaN slices" % i)
                continue

            if np.isnan(ind):
                logger.info("%s: bottom of valid data" % i)
                break

            ind2 = np.unravel_index(ind, t_level.shape)

            t_closest = t_level[ind2]
            s_closest = s_level[ind2]
            # d_closest = d_level[ind2]

            temp_pot[i] = t_closest
            sal[i] = s_closest
            d[i] = self._d[i]

            # Calculate in-situ temperature
            p = Oc.d2p(d[i], lat)
            temp_in_situ[i] = Oc.in_situ_temp(s=sal[i], t=t_closest, p=p, pr=self._ref_p)
            # logger.info("%02d: %6.1f %6.1f > T/S/Dist: %3.1f %3.1f %3.1f [pot.temp. %3.1f]"
            #             % (i, d[i], p, temp_in_situ[i], s_closest, d_closest, t_closest))

            num_values += 1

        if num_values == 0:
            logger.info("no data from lookup!")
            return None

        # ind = np.nanargmin(distances[0])
        # ind2 = np.unravel_index(ind, distances[0].shape)
        # switching to the query location
        # lat_out = latitudes[ind2]
        # lon_out = longitudes[ind2]
        # while lon_out > 180.0:
        #     lon_out -= 360.0

        # Make a new SV object to return our query in
        ssp = Profile()
        ssp.meta.sensor_type = Dicts.sensor_types['Synthetic']
        ssp.meta.probe_type = Dicts.probe_types['RTOFS']
        ssp.meta.latitude = lat
        if lon > 180.0:  # Go back to negative longitude
            lon -= 360.0
        ssp.meta.longitude = lon
        ssp.meta.utc_time = dt(year=dtstamp.year, month=dtstamp.month, day=dtstamp.day,
                               hour=dtstamp.hour, minute=dtstamp.minute, second=dtstamp.second)
        ssp.meta.original_path = "RTOFS_%s" % dtstamp.strftime("%Y%m%d_%H%M%S")
        ssp.init_data(num_values)
        ssp.data.depth = d[0:num_values]
        ssp.data.temp = temp_in_situ[0:num_values]
        ssp.data.sal = sal[0:num_values]
        ssp.calc_data_speed()
        ssp.clone_data_to_proc()
        ssp.init_sis()

        profiles = ProfileList()
        profiles.append_profile(ssp)

        return profiles
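
A minimal call sketch, assuming a hypothetical rtofs atlas instance; unlike the WOA13 and GoMOFS variants above, this query keeps the time of day in the profile metadata.

from datetime import datetime as dt

# 'rtofs' is a hypothetical, already-initialized atlas instance exposing the query() above
profiles = rtofs.query(lat=43.05, lon=-70.32, dtstamp=dt(2019, 3, 14, 12, 30, 0))
if profiles is None:
    print("no cast: grid lookup failed or no valid data near the location")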