Example #1
    def __init__(self, bounds=None, parameterName=''):
        """
        Parameters
        ----------
        bounds : nephelae.types.Bounds or a [min, max] list

        parameterName : str
        """
        super().__init__(parameterName=parameterName)
        if isinstance(bounds, Bounds) or bounds is None:
            self.bounds = bounds
        else:  # Assuming a list of min and max values
            self.bounds = Bounds(bounds[0], bounds[-1])
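# A minimal usage sketch (assumption: this __init__ belongs to the SimpleBounds
# rule shown in Example #9). Both accepted forms of the `bounds` argument:
#     rule_a = SimpleBounds(bounds=Bounds(0.0, 10.0), parameterName='altitude')
#     rule_b = SimpleBounds(bounds=[0.0, 10.0], parameterName='altitude')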
Example #2
def build_mesonh_probe(aircraft,
                       mesonhFiles,
                       mesonhVariables=[],
                       targetCacheBounds=[[0, 20], [-500, 500], [-500, 500],
                                          [-400, 200]],
                       updateThreshold=0.25,
                       rctFeedback=True,
                       defaultRctBounds=Bounds(0.0, 1.0e-5),
                       mesonhOrigin=None):

    if isinstance(defaultRctBounds, list):
        defaultRctBounds = Bounds(defaultRctBounds[0], defaultRctBounds[-1])
    aircraft.load_plugin(MesonhProbe, mesonhFiles, mesonhVariables,
                         targetCacheBounds, updateThreshold, rctFeedback,
                         defaultRctBounds, mesonhOrigin)
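# Hypothetical call sketch: `aircraft` is assumed to be an object exposing
# load_plugin() (see Example #15 for the MesonhProbe plugin being loaded);
# the file path is a placeholder.
#     build_mesonh_probe(aircraft,
#                        mesonhFiles='/path/to/mesonh_file.nc',
#                        mesonhVariables=['RCT', 'WT'],
#                        rctFeedback=True)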
Example #3
def compute_bounding_box(scaledArr, threshold):
    """
    Computes the bounding box of each detected element and returns the list of
    bounds of all elements.

    Parameters
    ----------
    scaledArr : ScaledArray
        Contains the data of interest.
    threshold : float
        Threshold used to label the elements of the array.

    Returns
    -------
    list
        The list of bounds of the elements in the ScaledArray.
        The length of the list is equal to number_of_elements.
    """
    data_labeled, number_of_elements = get_number_of_elements(scaledArr,
                                                              threshold)
    list_of_boxes = []
    for i in range(1, number_of_elements + 1):
        out = np.where(data_labeled == i)
        locations = np.array([X for X in out])
        mins = scaledArr.dimHelper.to_unit(np.amin(locations, axis=1).tolist())
        maxs = scaledArr.dimHelper.to_unit(np.amax(locations, axis=1).tolist())
        list_of_boxes.append([Bounds(mins[j], maxs[j])
                              for j in range(len(mins))])
    return list_of_boxes
Example #4
    def load_mesonh_map(self, mapId, config):
        """
        load_mesonh_map

        Loads a single MesonhMap from a yaml-parsed configuration and adds it
        to the self.maps attribute.
        """
        if self.mesonhDataset is None:
            warn("No mesonh files were given in configuration file. " +
                 "Cannot instanciate MesonhMap '" + config['name'] + "'")
            return

        # Populating parameters for MesonhMap init
        params = {
            'name': config['name'],
            'atm': self.mesonhDataset,
            'mesonhVar': config['mesonh_variable'],
            'threshold': config.get('threshold', 0)
        }

        if 'interpolation' in config.keys():
            params['interpolation'] = config['interpolation']
        if 'origin' in config.keys():
            params['origin'] = config['origin']

        # Instantiation
        self.maps[mapId] = MesonhMap(**params)

        if 'data_range' in config.keys():
            rng = config['data_range']
            self.maps[mapId].dataRange = (Bounds(rng[0], rng[1]), )
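# Illustrative only: a parsed-YAML config dict with the keys this loader reads
# (values are placeholders, not taken from the source).
#     config = {
#         'name': 'Liquid Water Content',
#         'mesonh_variable': 'RCT',
#         'threshold': 1.0e-10,            # optional
#         'interpolation': 'linear',       # optional
#         'origin': [0.0, 0.0, 0.0, 0.0],  # optional
#         'data_range': [0.0, 1.0e-5],     # optional
#     }
#     self.load_mesonh_map('lwc', config)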
Example #5
    def find_bounds(self, tags=[], keys=None, assumePositiveTime=False):
        """
        keys : tuple of slice(float, float, None)
               The slice values are the bounds of the 4D cube containing the
               requested data.
               There must be exactly 4 slices in the tuple.
        """

        outputDict = self.build_entry_dict(tags, keys, assumePositiveTime)
        bounds = [Bounds(), Bounds(), Bounds(), Bounds()]
        for entry in outputDict.values():
            if len(entry) != 4:
                continue
            bounds[0].update(entry[0].data.position.t)
            bounds[1].update(entry[0].data.position.x)
            bounds[2].update(entry[0].data.position.y)
            bounds[3].update(entry[0].data.position.z)
        return bounds
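# Hypothetical usage sketch: `database` stands for the object defining
# find_bounds() (likely a NephelaeDataServer-like store); the tag and the
# slice ranges are placeholders.
#     tBounds, xBounds, yBounds, zBounds = database.find_bounds(
#         tags=['RCT'],
#         keys=(slice(0.0, 100.0), slice(-500.0, 500.0),
#               slice(-500.0, 500.0), slice(0.0, 1000.0)))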
Example #6
    def load_gpr_map(self, mapId, config):
        """
        load_gpr_map

        Loads a ValueMap from a yaml-parsed configuration and adds it to the
        self.maps attribute. Depending on the configuration, may also load a
        StdMap sharing the same GprPredictor.
        """

        if 'kernel' not in config.keys():
            warn("No kernel defined for GprMap '" + str(config['name']) +
                 "'. Cannot instantiate this map.")
            return
        if config['kernel'] not in self.kernels.keys():
            warn("No kernel '" + config['kernel'] + "' defined. " +
                 "Cannot instantiate '" + config['name'] + "' map.")
            return

        # gpr = GprPredictor(config['name'], self.database,
        #                 config['database_tags'],
        #                 self.kernels[config['kernel']])
        gpr = GprPredictor(config['name'], self.dataviews[config['data_view']],
                           self.kernels[config['kernel']])

        if 'threshold' in config.keys():
            gpr.threshold = config['threshold']

        if 'data_range' in config.keys():
            gpr.dataRange = (Bounds(config['data_range'][0],
                                    config['data_range'][1]), )
            gpr.updateRange = False

        self.maps[mapId] = ValueMap(config['name'], gpr)

        if 'std_map' in config.keys():
            # If std_map is defined in the config, create a new StdMap sharing
            # the GprPredictor of the ValueMap created above.
            if mapId + '_std' in self.maps.keys():
                warn("The map '" + mapId + "_std' id is already defined. " +
                     "Cannot instantiate '" + config['std_map'] + "' map.")
            else:
                self.maps[mapId + '_std'] = StdMap(config['std_map'], gpr)

        if 'border_map' in config.keys():
            if mapId + '_border' in self.maps.keys():
                warn("The map '" + mapId + "_border' id is already defined. " +
                     "Cannot instantiate '" + config['border_map'] + "' map.")
            else:
                if mapId + '_std' in self.maps.keys():
                    self.maps[mapId + '_border'] = BorderIncertitude(
                        config['border_map'], self.maps[mapId],
                        self.maps[mapId + '_std'])
                else:
                    self.maps[mapId + '_border'] = BorderRaw(
                        config['border_map'], self.maps[mapId])
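# Illustrative only: a parsed-YAML config dict with the keys this loader reads
# (kernel, data_view and map names are placeholders, not taken from the source).
#     config = {
#         'name': 'RCT_gpr',
#         'kernel': 'rct_kernel',        # must exist in self.kernels
#         'data_view': 'rct_samples',    # must exist in self.dataviews
#         'threshold': 1.0e-10,          # optional
#         'data_range': [0.0, 1.0e-5],   # optional
#         'std_map': 'RCT_gpr_std',      # optional
#         'border_map': 'RCT_border',    # optional
#     }
#     self.load_gpr_map('rct_gpr', config)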
Example #7
    def __getitem__(self, keys):
        """
        This is the main method called to compare maps. It returns the
        reference slice data, the compared slice data, and the compared slice
        resampled to the resolution of the reference slice.

        /!\ Only compatible with 2D slices for now
        """
        # Getting a reference map slice
        self.slice1 = self.map1.__getitem__(keys)

        # In the case of a MesonhMap, the bounds of the output might slightly
        # differ from the requested ones because the underlying data sampling
        # might not match the requested keys (key values falling between
        # voxels of the MesonhMap). slice1 contains the bounds closest to the
        # requested keys that match the MesoNH sampling.
        # /!\ The bounds given match the positions of the centers of the
        # border voxels. More on that below.
        self.bounds = self.slice1.bounds

        # Here new keys are calculated to request data from self.map2. For the
        # data to be comparable, slice2 must be resampled to match the
        # resolution of slice1. However, given how the data are interpolated
        # and how coarse the resolution is, the voxel size is not negligible
        # and the outer limits of the slices must be carefully aligned: the
        # scales of self.map1 and self.map2 refer to the centers of the
        # voxels, so slice2 must be requested in such a way that the outer
        # borders of slice1 and slice2 fall at the same position.

        self.newKeys = []
        i = 0
        for key in keys:
            if isinstance(key, slice):
                self.newKeys.append(
                    slice(self.bounds[i].min, self.bounds[i].max))
                i = i + 1
            else:
                self.newKeys.append(key)

        self.externalBounds = []
        i = 0
        for key, res1 in zip(keys, self.map1.resolution()):
            if isinstance(key, slice):
                self.externalBounds.append(
                    Bounds(self.bounds[i].min - res1 / 2.0,
                           self.bounds[i].max + res1 / 2.0))
                i = i + 1

        self.slice2 = self.map2.__getitem__(self.newKeys)

        # This is the part that does not allow anything else than 2D slices
        self.data2resampled = np.array(
            Image.fromarray(self.slice2.data.T).resize(self.slice1.shape,
                                                       Image.BICUBIC)).T

        return self.slice1.data, self.slice2.data, self.data2resampled
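# Worked illustration of the half-voxel alignment above (numbers are
# placeholders, not from the source): if slice1 spans voxel centers from
# 0.0 m to 100.0 m with a 25.0 m resolution, the outer borders of the slice
# are at 0.0 - 25.0/2 = -12.5 m and 100.0 + 25.0/2 = 112.5 m, which is what
# self.externalBounds stores, e.g. Bounds(-12.5, 112.5).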
Example #8
    def __initplugin__(self,
                       mesonhFiles,
                       mesonhVariables=[],
                       targetCacheBounds=[[0, 20], [-500, 500], [-500, 500],
                                          [-400, 200]],
                       updateThreshold=0.25,
                       rctFeedback=True,
                       defaultRctBounds=Bounds(0.0, 1.0e-5),
                       mesonhOrigin=None):

        self.mesonhInitialized = False
        self.rctFeedback = rctFeedback
        self.windFeedback = rctFeedback
        self.add_notification_method('add_sample')

        # Check that the proper variables are fetched to do the feedback
        # (copy so that the caller's list / default argument is not mutated)
        if self.rctFeedback and 'RCT' not in mesonhVariables:
            mesonhVariables = list(mesonhVariables) + ['RCT']

        if isinstance(mesonhFiles, MesonhDataset):
            self.atm = mesonhFiles
        else:
            self.atm = MesonhDataset(mesonhFiles)
        self.probes = {}
        for var in mesonhVariables:
            mesonhVar = MesonhVariable(self.atm,
                                       var,
                                       origin=mesonhOrigin,
                                       interpolation='linear')
            self.probes[str(var)] = MesonhCachedProbe(mesonhVar,
                                                      targetCacheBounds,
                                                      updateThreshold)
            self.probes[str(var)].start()

        b = self.probes['RCT'].var.actual_range[0]
        if b.min is None or b.max is None:
            self.rctBounds = defaultRctBounds
            print(
                "Warning. This Mesonh dataset does not seem to define " +
                "the range of its RCT variable. Using default value.",
                defaultRctBounds)
        else:
            self.rctBounds = Bounds(b[0], b[-1])
Example #9
class SimpleBounds(ParameterRules):
    """
    SimpleBounds

    Implements a simple bound checking.

    Attributes
    ----------
    bounds : nephelae.types.Bounds
        Bounds to compare parameter with.

    Methods
    -------
    check(AnyType) -> AnyType
        If the parameter is not None, checks whether it is inside self.bounds.
        Raises ValueError if it is not; otherwise returns the checked
        parameter.
    """
    def __init__(self, bounds=None, parameterName=''):
        """
        Parameters
        ----------
        bounds : nephelae.types.Bounds or a [min, max] list

        parameterName : str
        """
        super().__init__(parameterName=parameterName)
        if isinstance(bounds, Bounds) or bounds is None:
            self.bounds = bounds
        else:  # Assuming a list of min and max values
            self.bounds = Bounds(bounds[0], bounds[-1])

    def description(self):
        return "Bounds : " + str(self.bounds)

    def check(self, parameterValue):
        if not self.bounds.isinside(parameterValue):
            raise ValueError("Parameter " + self.parameterName + \
                             " (" + str(parameterValue) + ") is not inside " +\
                             str(self.bounds))
        return parameterValue

    def summary(self):
        return {'bounds': {'min': self.bounds.min, 'max': self.bounds.max}}
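# A minimal usage sketch (placeholder values):
#     rule = SimpleBounds(bounds=[0.0, 10.0], parameterName='altitude')
#     rule.check(5.0)     # returns 5.0
#     rule.check(20.0)    # raises ValueError
#     rule.summary()      # {'bounds': {'min': 0.0, 'max': 10.0}}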
Example #10
    def __init__(self, atm, var, origin=None, interpolation='linear'):

        tdim = atm.dimensions[0]['data']
        xdim = atm.dimensions[3]['data']
        ydim = atm.dimensions[2]['data']
        zdim = atm.dimensions[1]['data']

        # Seems more logical to start a simulation at time 0.0 in any case?
        tdim = tdim - tdim[0]

        # Reset origin if given
        if origin is not None:
            tdim = tdim - tdim[0] + origin[0]
            xdim = xdim - xdim[0] + origin[1]
            ydim = ydim - ydim[0] + origin[2]
            zdim = zdim - zdim[0] + origin[3]

        self.resolution = ((tdim[-1] - tdim[0]) / (len(tdim) - 1),
                           (xdim[-1] - xdim[0]) / (len(xdim) - 1),
                           (ydim[-1] - ydim[0]) / (len(ydim) - 1),
                           (zdim[-1] - zdim[0]) / (len(zdim) - 1))

        dimHelper = DimensionHelper()
        # dimHelper.add_dimension(tdim, 'LUT')
        dimHelper.add_dimension(tdim, 'linear')
        dimHelper.add_dimension(xdim, 'linear')
        dimHelper.add_dimension(ydim, 'linear')
        dimHelper.add_dimension(zdim, 'LUT')

        # Creating a ScaledArray with a PeriodicContainer as base
        # (MesoNH x,y are periodic)
        super().__init__(
            PeriodicContainer(MesonhInterface(atm, var), [0, 1, 2]), dimHelper,
            interpolation)

        actual_range = []
        for v in self.data.data.varData:
            if not hasattr(v, 'actual_range'):
                actual_range.append(Bounds())
            else:
                actual_range.append(v.actual_range)
        self.actual_range = tuple(actual_range)
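# Hypothetical usage sketch, assuming `dataset` is a MesonhDataset
# (as in Example #16):
#     rct = MesonhVariable(dataset, 'RCT', interpolation='linear')
#     print(rct.resolution)        # (dt, dx, dy, dz) grid steps
#     print(rct.actual_range[0])   # value range of the variable, if defined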
Example #11
    def __init__(self,
                 name,
                 dataview,
                 kernel,
                 dataRange=(Bounds(0, 0), ),
                 updateRange=True,
                 threshold=0):
        """
        name : str
            Name of the computed map. Must be unique.

        database (nephelae_mapping.database):
            database from which fetch the relevant data for map computation.

        databaseTags : list(str, ...)
            tags for searching data in the database.

        kernel : sklearn.gaussian_process.kernel.Kernel derived type
            Kernel used in GPR.
        """
        super().__init__(name, threshold=threshold)
        # self.database       = database
        # self.databaseTags   = databaseTags
        self.dataview = dataview
        self.kernel = kernel
        self.gprProc = GaussianProcessRegressor(self.kernel,
                                                alpha=0.0,
                                                optimizer=None,
                                                copy_X_train=False)
        self.cache = None
        self.keys = None
        self.locationsLock = threading.Lock()
        self.getItemLock = threading.Lock()
        self.computeStd = False
        self.updateRange = updateRange
        self.dataRange = dataRange
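# Construction sketch mirroring the pattern of Example #6 (`dataview` and
# `kernel0` are assumed to exist; names and values are placeholders):
#     gpr = GprPredictor('RCT_gpr', dataview, kernel0)
#     gpr.threshold = 1.0e-10
#     gpr.dataRange = (Bounds(0.0, 1.0e-5), )
#     gpr.updateRange = False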
Example #12
    def bounds(self):

        maxSlice = self.to_unit(slice(None, None, None))
        return Bounds(maxSlice.start, maxSlice.stop)
Example #13
noiseStddev = 0.1 * np.sqrt(processVariance)
# kernel0 = WindKernel(lengthScales, processVariance, noiseStddev**2, v0)
# lengthScales = [70.0, 60.0, 60.0, 60.0]
lengthScales = [70.0, 80.0, 80.0, 60.0]
kernel0 = WindKernel(lengthScales, processVariance, noiseStddev**2, WindMapConstant('Wind',v0))

dtfile = 'output/wind_data04.neph'
dtbase = NephelaeDataServer.load(dtfile)

gpr = GprPredictor(dtbase, ['RCT'], kernel0)
map_gpr = ValueMap('RCT_gpr', gpr)
std_gpr = StdMap('RCT_gpr', gpr)

t0 = 200.0
z0 = 1100.0
b = [Bounds(0.0, 715), Bounds(12.5, 6387.5), Bounds(1837.5, 2712.5), Bounds(12.5, 3987)]

mesonhSlice = rct[t0, b[1].min:b[1].max, b[2].min:b[2].max, z0]
r2 = map_gpr.resolution()[1] / 2.0
gprSlice    = map_gpr[t0, b[1].min+r2:b[1].max-r2, b[2].min+r2:b[2].max-r2, z0]

# gprCenter0 = compute_com(gprSlice)
# gprSlice.data[gprSlice.data < 0] = 0.0
# gprCenter1 = compute_com(gprSlice)

gprSliceResampled = np.array(Image.fromarray(gprSlice.data.T).resize(mesonhSlice.shape, Image.BICUBIC)).T

mask = np.zeros(mesonhSlice.data.shape)
mask[gprSliceResampled > 1.0e-10] = 1.0
mesonhSlice.data = mesonhSlice.data * mask
gprSliceResampled = gprSliceResampled * mask
Example #14
#! /usr/bin/python3

import sys
sys.path.append('../../')
import numpy as np

from nephelae.types import Bounds

a0 = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
b0 = Bounds.from_array(a0)

print(b0)
Example #15
class MesonhProbe:
    """
    MesonhProbe

    Aircraft plugin to get data from a MesoNH file and send LWC feedback
    to the aircraft.
    """
    def __pluginmethods__():
        return [{
            'name': 'stop',
            'method': MesonhProbe.stop,
            'conflictMode': 'prepend'
        }, {
            'name': 'flight_param_callback',
            'method': MesonhProbe.flight_param_callback,
            'conflictMode': 'append'
        }, {
            'name': 'add_sensor_observer',
            'method': MesonhProbe.add_sensor_observer,
            'conflictMode': 'abort'
        }, {
            'name': 'remove_sensor_observer',
            'method': MesonhProbe.remove_sensor_observer,
            'conflictMode': 'abort'
        }, {
            'name': 'rct_feedback',
            'method': MesonhProbe.rct_feedback,
            'conflictMode': 'error'
        }]

    def __initplugin__(self,
                       mesonhFiles,
                       mesonhVariables=[],
                       targetCacheBounds=[[0, 20], [-500, 500], [-500, 500],
                                          [-400, 200]],
                       updateThreshold=0.25,
                       rctFeedback=True,
                       defaultRctBounds=Bounds(0.0, 1.0e-5),
                       mesonhOrigin=None):

        self.mesonhInitialized = False
        self.rctFeedback = rctFeedback
        self.windFeedback = rctFeedback
        self.add_notification_method('add_sample')

        # Check that the proper variables are fetched to do the feedback
        # (copy so that the caller's list / default argument is not mutated)
        if self.rctFeedback and 'RCT' not in mesonhVariables:
            mesonhVariables = list(mesonhVariables) + ['RCT']

        if isinstance(mesonhFiles, MesonhDataset):
            self.atm = mesonhFiles
        else:
            self.atm = MesonhDataset(mesonhFiles)
        self.probes = {}
        for var in mesonhVariables:
            mesonhVar = MesonhVariable(self.atm,
                                       var,
                                       origin=mesonhOrigin,
                                       interpolation='linear')
            self.probes[str(var)] = MesonhCachedProbe(mesonhVar,
                                                      targetCacheBounds,
                                                      updateThreshold)
            self.probes[str(var)].start()

        b = self.probes['RCT'].var.actual_range[0]
        if b.min is None or b.max is None:
            self.rctBounds = defaultRctBounds
            print(
                "Warning. This Mesonh dataset does not seem to define " +
                "the range of its RCT variable. Using default value.",
                defaultRctBounds)
        else:
            self.rctBounds = Bounds(b[0], b[-1])

    def stop(self):
        for probe in self.probes.values():
            probe.stop()

    def flight_param_callback(self, msg):

        position = self.status.position.copy()
        readKeys = (position.t, position.x, position.y, position.z)
        if not self.mesonhInitialized:
            try:
                for probe in self.probes.values():
                    probe.request_cache_update(readKeys, block=True)
            except Exception as e:  # !!!!!!!!!!!!!! Fix this !
                print(traceback.format_exc())
                print("Could not read, feedback :", e)
                return
            self.mesonhInitialized = True

        for var in self.probes.keys():
            try:
                value = self.probes[var][readKeys]
                sample = SensorSample(variableName=var,
                                      producer=self.id,
                                      timeStamp=position.t,
                                      position=position,
                                      data=[value])
                self.add_sample(sample)

                if var == 'RCT' and self.rctFeedback:
                    self.rct_feedback(value)
            except Exception as e:  # !!!!!!!!!!!!!! Fix this !
                print(traceback.format_exc())
                print("Could not read, feedback :", e)

    def add_sensor_observer(self, observer):
        self.attach_observer(observer, 'add_sample')

    def remove_sensor_observer(self, observer):
        self.detach_observer(observer, 'add_sample')

    def rct_feedback(self, rctValue):
        msg = PprzMessage('datalink', 'PAYLOAD_COMMAND')
        msg['ac_id'] = int(self.id)
        msg['command'] = [
            max(0, min(255, int(255 * rctValue / self.rctBounds.span())))
        ]
        messageInterface.send(msg)
Example #16
import numpy as np
import matplotlib.pyplot as plt

from nephelae.types import Bounds
from nephelae.mapping import MapServer
from nephelae_mesonh import MesonhMap, MesonhDataset

mesonhFiles = "/home/pnarvor/work/nephelae/data/nephelae-remote/MesoNH02/bomex_hf.nc"

dataset = MesonhDataset(mesonhFiles)
maps = {
    'lwc': MesonhMap("Liquid Water Content", dataset, 'RCT'),
    'wt': MesonhMap("Vertical Wind", dataset, 'WT'),
    'hwind': MesonhMap("Horizontal Wind", dataset, ['UT', 'VT'])
}

mapServer0 = MapServer(mapSet=maps,
                       mapBounds=(None, Bounds(300.0, 800.0), None, None))
mapServer1 = MapServer(mapSet=maps,
                       mapBounds=(None, Bounds(300.0, 800.0),
                                  Bounds(0.0, 500.0), None))

fig, axes = plt.subplots(2, 1, sharex=True)
# axes[0].imshow(mapServer0['lwc'][0.0,:,0.0:12000.0,650.0].data.T, origin='lower')
# axes[1].imshow(mapServer1['lwc'][0.0,:,0.0:12000.0,650.0].data.T, origin='lower')
axes[0].imshow(mapServer0['wt'][0.0, :, :, 650.0].data.T, origin='lower')
axes[1].imshow(mapServer1['wt'][0.0, :, :, 650.0].data.T, origin='lower')

plt.show(block=False)
Example #17
    def __compute_bounding_box(self):
        mins = self.__scArr.dimHelper.to_unit(
            np.amin(self.get_locations(), axis=1).tolist())
        maxs = self.__scArr.dimHelper.to_unit(
            np.amax(self.get_locations(), axis=1).tolist())
        self.__boundingBox = [Bounds(mins[i], maxs[i])
                              for i in range(len(mins))]
Example #18
    def at_locations(self, locations, locBounds=None):
        """Computes predicted value at each given location using GPR.

        This method is used in the map interface when requesting a dense map.
        When requesting a dense map, each location must be the position of
        on pixel of the requested map.

        This method automatically fetch relevant data from self.database
        to compute the predicted values.

        Parameters
        ----------
        locations : numpy.array (N x 4)
            Locations N x (t,x,y,z) for each of the N points where to compute
            a predicted value using GPR. 
            Note : this implementation of GprPredictor does not enforce the
            use of a 4D space (t,x,y,z) for location. However, the
            self.database attribute is most likely a
            nephelae.database.NephelaeDataServer type, which enforce the
            use of a 4D (t,x,y,z) space.

        Returns : numpy.array (N x M)
            Predicted values at locations. Can be more than 1 dimensional
            depending on the data fetched from the database.
            Example : If the database contains samples of 2D wind vector.
            The samples are 2D. The predicted map is then a 2D field vector
            defined on a 4D space-time.

        Note : This method probably needs more refining.
        (TODO : investigate this)
        """
        with self.locationsLock:
            # kernelSpan is also needed below when locBounds is provided
            kernelSpan = self.kernel.span()
            if locBounds is None:
                locBounds = Bounds.from_array(locations.T)

                # Extend the fetch bounds by the kernel span in each dimension
                for b, span in zip(locBounds, kernelSpan):
                    b.min = b.min - span
                    b.max = b.max + span

            # samples = [entry.data for entry in \
            #         self.database[self.databaseTags]\
            #         (assumePositiveTime=False)\
            #         [locBounds[0].min:locBounds[0].max,
            #         locBounds[1].min:locBounds[1].max,
            #         locBounds[2].min:locBounds[2].max,
            #         locBounds[3].min:locBounds[3].max]]
            samples = self.dataview[locBounds[0].min:locBounds[0].max,
                                    locBounds[1].min:locBounds[1].max,
                                    locBounds[2].min:locBounds[2].max,
                                    locBounds[3].min:locBounds[3].max]

            if len(samples) < 1:
                return (np.ones((locations.shape[0], 1)) * self.kernel.mean,
                        np.ones(locations.shape[0]) * self.kernel.variance)

            else:

                trainLocations = np.array(
                    [[s.position.t, s.position.x, s.position.y, s.position.z]
                     for s in samples])

                trainValues = np.array([s.data for s in samples]).squeeze()
                if len(trainValues.shape) < 2:
                    trainValues = trainValues.reshape(-1, 1)

                boundingBox = (np.min(trainLocations,
                                      axis=0), np.max(trainLocations, axis=0))

                dt = boundingBox[1][0] - boundingBox[0][0]

                wind = self.kernel.windMap.get_wind()

                dx, dy = dt * wind

                boundingBox[0][1] = min(boundingBox[0][1],
                                        boundingBox[0][1] + dx)
                boundingBox[1][1] = max(boundingBox[1][1],
                                        boundingBox[1][1] + dx)

                boundingBox[0][2] = min(boundingBox[0][2],
                                        boundingBox[0][2] + dy)
                boundingBox[1][2] = max(boundingBox[1][2],
                                        boundingBox[1][2] + dy)

                same_locations = np.where(
                    np.logical_and(
                        np.logical_and(
                            np.logical_and(
                                locations[:, 0] >=
                                boundingBox[0][0] - kernelSpan[0],
                                locations[:, 0] <=
                                boundingBox[1][0] + kernelSpan[0]),
                            np.logical_and(
                                locations[:, 1] >=
                                boundingBox[0][1] - kernelSpan[1],
                                locations[:, 1] <=
                                boundingBox[1][1] + kernelSpan[1])),
                        np.logical_and(
                            np.logical_and(
                                locations[:, 2] >=
                                boundingBox[0][2] - kernelSpan[2],
                                locations[:, 2] <=
                                boundingBox[1][2] + kernelSpan[2]),
                            np.logical_and(
                                locations[:, 3] >=
                                boundingBox[0][3] - kernelSpan[3],
                                locations[:, 3] <=
                                boundingBox[1][3] + kernelSpan[3]))))[0]

                selected_locations = locations[same_locations]
                self.gprProc.fit(trainLocations, trainValues)
                computed_locations = self.gprProc.predict(
                    selected_locations, return_std=self.computeStd)

                if self.computeStd:
                    val_res = np.ones(
                        (locations.shape[0], 1)) * self.kernel.mean
                    std_res = np.ones(locations.shape[0]) * np.sqrt(
                        self.kernel.variance + self.kernel.noiseVariance)
                    np.put(val_res, same_locations, computed_locations[0])
                    np.put(std_res, same_locations, computed_locations[1])
                    val_return = (val_res, std_res)
                else:
                    res = np.ones((locations.shape[0], 1)) * self.kernel.mean
                    np.put(res, same_locations, computed_locations)
                    val_return = (res, None)

                if self.updateRange:
                    tmp = val_return[0]

                    Min = tmp.min(axis=0)
                    Max = tmp.max(axis=0)

                    if np.isscalar(Min):
                        Min = [Min]
                        Max = [Max]

                    if len(Min) != len(self.dataRange):
                        self.dataRange = tuple(
                            Bounds(m, M) for m, M in zip(Min, Max))
                    else:
                        for b, m, M in zip(self.dataRange, Min, Max):
                            b.update(m)
                            b.update(M)

                return val_return
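# Hypothetical usage sketch: build an (N x 4) array of (t, x, y, z) locations
# covering one horizontal slice and query the predictor. `gpr` is assumed to
# be a GprPredictor instance (see Example #11); t0, z0 and the grid extents
# are placeholders.
#     t0, z0 = 200.0, 1100.0
#     x, y = np.meshgrid(np.linspace(0.0, 1000.0, 50),
#                        np.linspace(0.0, 1000.0, 50), indexing='ij')
#     locations = np.stack([np.full(x.size, t0), x.ravel(), y.ravel(),
#                           np.full(x.size, z0)], axis=1)
#     values, stds = gpr.at_locations(locations)  # stds is None unless
#                                                 # gpr.computeStd is True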