Exemple #1
0
def test_datashader_image_nan():
    '''Check that geo.stretch_image handles 2D input arrays containing
    NaNs in both the coordinates and the values.

    Regression test for the modification made in commit 05962ec.'''
    nan = numpy.nan
    # Only a 3x3 corner of each 6x6 array holds real data; everything
    # else is NaN, which previously broke JSON serialization.
    x = numpy.full((6, 6), nan)
    x[3:6, 0:3] = [[-32.12598, -32.07721, -32.02848],
                   [-32.15292, -32.104084, -32.055298],
                   [-32.17996, -32.13106, -32.08221]]
    y = numpy.full((6, 6), nan)
    y[3:6, 0:3] = [[-40.60027, -40.596905, -40.593544],
                   [-40.643642, -40.640266, -40.636898],
                   [-40.68706, -40.683674, -40.680298]]
    z = numpy.full((6, 6), nan)
    z[2:5, 0:3] = [[276.23, 273.04, 270.75],
                   [277.12, 273.55, 270.82],
                   [nan, 273.24, 270.16998]]

    result = geo.stretch_image(x, y, z)

    # This raises a ValueError if the NaNs above are handled improperly.
    serialize_json(result)

    # Should be returning NumPy masked arrays.
    assert numpy.ma.is_masked(result['image'][0])
Exemple #2
0
def load_image_pts(path, variable, pts_3d, pts_4d):
    """Load a bokeh image dict for a variable, with caching.

    Results are memoised in the module-level ``IMAGES`` dict keyed on
    path, variable and hashes of the slicing points.

    :param path: file to read
    :param variable: variable name within the file
    :param pts_3d: points used to slice 3D data (hashed into the cache key)
    :param pts_4d: points used to slice 4D data (hashed into the cache key)
    :returns: image dict produced by :func:`geo.stretch_image`
    """
    key = (path, variable, pts_hash(pts_3d), pts_hash(pts_4d))
    if key in IMAGES:
        return IMAGES[key]
    try:
        lons, lats, values, units = _load_netcdf4(path, variable, pts_3d,
                                                  pts_4d)
    except Exception:
        # Best-effort fallback: if the netCDF4 loader cannot read the
        # file, try the cube-based loader instead.
        lons, lats, values, units = _load_cube(path, variable, pts_3d,
                                               pts_4d)

    # Units: normalise rainfall rates and temperatures for display
    if variable in ["precipitation_flux", "stratiform_rainfall_rate"]:
        if units != "mm h-1":
            values = convert_units(values, units, "kg m-2 hour-1")
    elif units == "K":
        values = convert_units(values, "K", "Celsius")

    # Coarsify large images to keep rendering responsive
    threshold = 200 * 200  # Chosen since TMA WRF is 199 x 199
    fraction = 0.25 if values.size > threshold else 1.
    lons, lats, values = coarsify(lons, lats, values, fraction)

    image = geo.stretch_image(lons, lats, values)
    IMAGES[key] = image
    return image
Exemple #3
0
 def load_image(self, path, itime, fraction=0.25):
     """Read one time slice from a netCDF file and build a bokeh image.

     Longitudes/latitudes come from instance attributes set elsewhere.

     :param path: netCDF file to read
     :param itime: time index into the file's "data" variable
     :param fraction: coarsening fraction passed to ``coarsify``
         (default 0.25, matching the previous hard-coded value)
     :returns: image dict produced by :func:`geo.stretch_image`
     """
     lons = self.longitudes
     lats = self.latitudes
     with netCDF4.Dataset(path) as dataset:
         values = dataset.variables["data"][itime]
     lons, lats, values = coarsify(lons, lats, values, fraction)
     return geo.stretch_image(lons, lats, values)
Exemple #4
0
    def image(self, state):
        '''Build bokeh image data for the NWCSAF variable/time in *state*.

        X and Y passed to :meth:`geo.stretch_image` must be 1D arrays. NWCSAF
        data are not on a regular grid so must be regridded first.

        `values` passed to :meth:`geo.stretch_image` must be a NumPy Masked
        Array.

        :param state: Bokeh State object of info from UI
        :returns: Output data from :meth:`geo.stretch_image`, or an empty
            image if no dataset matches the requested time/variable'''
        data = empty_image()
        for nc in self.locator._sets:
            # Select the dataset whose nominal product time matches the UI's
            # valid time and which actually contains the requested variable.
            # 'Z' is rewritten to 'UTC' so strptime's %Z can parse the zone.
            if str(
                    datetime.datetime.strptime(
                        nc.nominal_product_time.replace('Z', 'UTC'),
                        '%Y-%m-%dT%H:%M:%S%Z')
            ) == state.valid_time and self.locator.varlist[
                    state.variable] in nc.variables:
                # Regrid to a regular grid: flatten the 2D coordinate and
                # value arrays into point samples for griddata.
                x = nc['lon'][:].flatten()  # lat & lon both 2D arrays
                y = nc['lat'][:].flatten()  #
                z = nc[self.locator.varlist[state.variable]][:].flatten()

                # Define a regular target grid spanning the data's extent,
                # sized to the file's nx/ny dimensions.
                xi, yi = np.meshgrid(
                    np.linspace(x.min(), x.max(), nc.dimensions['nx'].size),
                    np.linspace(y.min(), y.max(), nc.dimensions['ny'].size),
                )

                # Interpolate scattered samples onto the grid; cells outside
                # the convex hull become NaN and are masked below.
                zi = griddata(np.array([x, y]).transpose(),
                              z, (xi, yi),
                              method='linear',
                              fill_value=np.nan)

                # Mask NaNs and out-of-range values so stretch_image
                # receives a proper masked array.
                zi = np.ma.masked_invalid(zi, copy=False)
                zi = np.ma.masked_outside(
                    zi,
                    nc[self.locator.varlist[state.variable]].valid_range[0],
                    nc[self.locator.varlist[state.variable]].valid_range[1],
                    copy=False)
                # The grid is regular, so one row/column gives the 1D axes.
                data = geo.stretch_image(xi[0, :], yi[:, 0], zi)
                data.update(
                    coordinates(state.valid_time, state.initial_time,
                                state.pressures, state.pressure))
                data.update({
                    'name':
                    [str(nc[self.locator.varlist[state.variable]].long_name)],
                })
                # 'units' is optional metadata on the netCDF variable.
                if 'units' in nc[self.locator.varlist[
                        state.variable]].ncattrs():
                    data.update({
                        'units':
                        [str(nc[self.locator.varlist[state.variable]].units)]
                    })

        return data
    def load_image(self, path, itime, scale=2):
        """Read one time slice from a file and build a coarsened bokeh image.

        Uses datashader (via :meth:`geo.stretch_image`) to downsample the
        image, e.g. from a 4.4km to an 8.8km grid with the default scale.

        :param path: dataset file to open with xarray
        :param itime: time index into the file's "data" variable
        :param scale: integer coarsening factor applied to both image
            dimensions (default 2, matching the previous hard-coded value)
        :returns: image dict produced by :func:`geo.stretch_image`
        """
        lons = self.longitudes
        lats = self.latitudes
        with xarray.open_dataset(path, engine=ENGINE) as nc:
            values = nc["data"][itime].values

        return geo.stretch_image(lons,
                                 lats,
                                 values,
                                 plot_width=int(values.shape[1] / scale),
                                 plot_height=int(values.shape[0] / scale))
 def image(self, state):
     """Return bokeh image data for the variable/time selected in *state*.

     Falls back to an empty image when no cube slice matches the
     requested valid time."""
     selected = self._cubes[state.variable]
     when = _to_datetime(state.valid_time)
     cube = self.extract_cube(selected, when)
     if cube is None:
         return empty_image()
     data = geo.stretch_image(
         cube.coord('longitude').points,
         cube.coord('latitude').points, cube.data)
     data.update(
         coordinates(state.valid_time, state.initial_time,
                     state.pressures, state.pressure))
     data.update({'name': [self._label], 'units': [str(cube.units)]})
     return data
 def _image(self, long_name, initial_time, valid_time, pressures, pressure):
     """Build bokeh image data for *long_name* at *valid_time*.

     Scans files near the valid time; if several match, the image from
     the last one wins. Returns an empty image when nothing matches."""
     data = empty_image()
     paths = self.locator.glob()
     name_map = self.locator.long_name_to_variable(paths)
     frequency = dt.timedelta(minutes=15)  # TODO: Support arbitrary frequencies
     for path in self.locator.find_paths(paths, valid_time, frequency):
         with xarray.open_dataset(path) as dataset:
             if long_name not in name_map:
                 continue
             variable = dataset[name_map[long_name]]
             # Mask NaNs in coordinates and values before stretching
             data = geo.stretch_image(
                 np.ma.masked_invalid(dataset['lon'])[:],
                 np.ma.masked_invalid(dataset['lat'])[:],
                 np.ma.masked_invalid(variable)[:])
             data.update(coordinates(valid_time, initial_time, pressures, pressure))
             data['name'] = [str(variable.long_name)]
             if 'units' in variable.attrs:
                 data['units'] = [str(variable.units)]
     return data
Exemple #8
0
def load_image_pts(path, variable, pts_3d, pts_4d):
    """Load a bokeh image dict for a variable, with caching.

    Results are memoised in the module-level ``IMAGES`` dict keyed on
    path, variable and hashes of the slicing points. Longitudes are
    rolled into the [-180, 180] range before stretching.

    :param path: file to read
    :param variable: variable name within the file
    :param pts_3d: points used to slice 3D data (hashed into the cache key)
    :param pts_4d: points used to slice 4D data (hashed into the cache key)
    :returns: image dict produced by :func:`geo.stretch_image`
    """
    key = (path, variable, pts_hash(pts_3d), pts_hash(pts_4d))
    if key in IMAGES:
        return IMAGES[key]
    try:
        lons, lats, values, units = _load_netcdf4(path, variable, pts_3d,
                                                  pts_4d)
    except Exception:
        # Best-effort fallback: if the netCDF4 loader cannot read the
        # file, try the cube-based loader instead.
        lons, lats, values, units = _load_cube(path, variable, pts_3d,
                                               pts_4d)

    # Units: normalise rainfall rates and temperatures for display
    if variable in ["precipitation_flux", "stratiform_rainfall_rate"]:
        if units != "mm h-1":
            values = convert_units(values, units, "kg m-2 hour-1")
    elif units == "K":
        values = convert_units(values, "K", "Celsius")

    # Coarsify large images to keep rendering responsive
    threshold = 200 * 200  # Chosen since TMA WRF is 199 x 199
    fraction = 0.25 if values.size > threshold else 1.
    lons, lats, values = coarsify(lons, lats, values, fraction)

    # Roll input data into [-180, 180] range, shifting the values
    # along the longitude axis by the same amount
    if np.any(lons > 180.0):
        shift_by = np.sum(lons > 180.0)
        lons[lons > 180.0] -= 360.
        lons = np.roll(lons, shift_by)
        values = np.roll(values, shift_by, axis=1)

    image = geo.stretch_image(lons, lats, values)
    IMAGES[key] = image
    return image
Exemple #9
0
    def image(self, state):
        """Return bokeh image data for the variable/time selected in *state*.

        Falls back to an empty image when no cube slice matches the
        requested valid time."""
        valid_datetime = _to_datetime(state.valid_time)
        constraint = iris.Constraint(time=valid_datetime)
        cube = self._cubes[state.variable].extract(constraint)

        if cube is None:
            return empty_image()

        longitudes = cube.coord("longitude").points
        latitudes = cube.coord("latitude").points
        data = geo.stretch_image(longitudes, latitudes, cube.data)
        data.update(
            coordinates(
                state.valid_time,
                state.initial_time,
                state.pressures,
                state.pressure,
            ))
        data.update({"name": [self._label], "units": [str(cube.units)]})
        return data
Exemple #10
0
    def load_image(cls, path, variable, pts):
        """Load bokeh image glyph data from file using slices.

        Converts units for rainfall/temperature variables, coarsifies
        large images and rolls longitudes into the [-180, 180] range.

        :param path: file to read
        :param variable: variable name within the file
        :param pts: slicing points forwarded to the loaders
        :returns: image dict from :func:`geo.stretch_image` with a
            'units' entry added
        """
        try:
            lons, lats, values, units = cls._load_xarray(path, variable, pts)
        except Exception:
            # Best-effort fallback: if xarray cannot read the file,
            # try the cube-based loader instead.
            lons, lats, values, units = cls._load_cube(path, variable, pts)

        # Units: normalise rainfall rates and temperatures for display
        if variable in ["precipitation_flux", "stratiform_rainfall_rate"]:
            if units != "mm h-1":
                values = forest.util.convert_units(values, units, "kg m-2 hour-1")
                units = "kg m-2 hour-1"
        elif units == "K":
            values = forest.util.convert_units(values, "K", "Celsius")
            units = "C"

        # Coarsify large images to keep rendering responsive
        threshold = 200 * 200  # Chosen since TMA WRF is 199 x 199
        fraction = 0.25 if values.size > threshold else 1.
        lons, lats, values = forest.util.coarsify(
            lons, lats, values, fraction)

        # Roll input data into [-180, 180] range, shifting the values
        # along the longitude axis by the same amount
        if np.any(lons > 180.0):
            shift_by = np.sum(lons > 180.0)
            lons[lons > 180.0] -= 360.
            lons = np.roll(lons, shift_by)
            values = np.roll(values, shift_by, axis=1)

        data = geo.stretch_image(lons, lats, values)
        data["units"] = [units]
        return data
Exemple #11
0
def _get_bokeh_image(
    cube,
    experiment_id,
    variable_id,
    institution_id,
    initial_time,
    member_id,
    selected_time,
    pressure,
):
    """
    A helper function to do the creation of the image dict required by bokeh.
    This includes downloading the actual data required for the current view, so
    this function is cached to reduce remote queries.

    :returns: image dict (an empty image when *cube* or *initial_time*
        is None)
    """
    def time_comp(select_time, time_cell):
        # Treat times within 2 days of the selection as matching
        data_time = util.to_datetime(time_cell.point)
        return abs((select_time - data_time).days) < 2

    def lat_filter(lat):
        """
        Due to the way the current projection of gridded data works, the poles are
        not well handled, resulting in NaNs if we use the full range of latitudes.
        The current hack is to chop off latitude greater than 85 degrees north and
        south. Given the importance of data at the poles in climate change research,
        we will need to fix this in future.
        """
        return -85.0 < lat < 85.0

    def pressure_select(select_pressure, data_pressure):
        # Match pressure levels within 1 hPa of the selection
        return abs(select_pressure - data_pressure.point) < 1.0

    if cube is None or initial_time is None:
        data = gridded_forecast.empty_image()
    else:
        constraint_dict = {
            'time': functools.partial(time_comp, selected_time),
            'latitude': lat_filter,
        }
        coord_names = [c1.name() for c1 in cube.coords()]
        if 'air_pressure' in coord_names:
            constraint_dict['air_pressure'] = functools.partial(
                pressure_select,
                pressure,
            )
        cube_cropped = cube.extract(iris.Constraint(**constraint_dict))
        lat_pts = cube_cropped.coord('latitude').points
        # Shift longitudes from [0, 360) to [-180, 180) and swap the two
        # halves of the data so values stay aligned with the new axis.
        long_pts = cube_cropped.coord('longitude').points - 180.0
        cube_data_cropped = cube_cropped.data
        cube_width = int(cube_data_cropped.shape[1] / 2)
        cube_data_cropped = numpy.concatenate([
            cube_data_cropped[:, cube_width:],
            cube_data_cropped[:, :cube_width]
        ],
                                              axis=1)

        data = geo.stretch_image(long_pts, lat_pts, cube_data_cropped)
        # Mask NaNs so downstream serialization handles them correctly
        data['image'] = [
            numpy.ma.masked_array(data['image'][0],
                                  mask=numpy.isnan(data['image'][0]))
        ]
    # Bug fix: the return was previously inside the else-branch, so the
    # cube-is-None path fell off the end and returned None instead of
    # the empty image.
    return data
Exemple #12
0
 def load_image(self, imageData):
     """Build a bokeh image from a mapping with 'longitude', 'latitude'
     and 'data' entries via :func:`geo.stretch_image`."""
     lons = imageData["longitude"]
     lats = imageData["latitude"]
     values = imageData["data"]
     return geo.stretch_image(lons, lats, values)