Esempio n. 1
0
def interpolate_nearest(x, y, z, x_val, y_val, z_val):
    """Build a nearest-neighbour interpolator for each coordinate direction.

    Parameters
    ----------
    x, y, z : np.ndarray
        Faces or edges of a mesh along the x-, y- and z-directions.
    x_val, y_val, z_val : np.ndarray
        Curl values or electric-field values along the corresponding
        direction.

    Returns
    -------
    tuple of scipy.interpolate.NearestNDInterpolator
        Three nearest-neighbour interpolators, one per direction (x, y, z).
    """
    interpolators = tuple(
        NearestNDInterpolator(points, values)
        for points, values in ((x, x_val), (y, y_val), (z, z_val))
    )
    return interpolators
Esempio n. 2
0
    def fit(self):
        """
        Defines the interpolation functions from the raw data.

        Builds an Rbf fit plus a nearest-neighbour fallback for the
        stacking-fault energy (and for the optional `delta` column) over the
        (a1, a2) fractional-coordinate plane.  The data is tiled into the 3x3
        grid of neighbouring periodic cells so the fits wrap correctly across
        the cell boundaries.

        NOTE(review): assumes ``self.data`` is a pandas DataFrame with columns
        a1, a2, E_gsf (and optionally delta) — confirm against the caller.
        """
        
        # Ignore a1, a2=1.0 values if included — they duplicate the 0.0 edge
        # once the data is replicated periodically below.
        shortdata = self.data[~(np.isclose(self.data.a1, 1.0) | np.isclose(self.data.a2, 1.0))]
        
        # Create supercells of values: replicate the cell into its 8
        # periodic neighbours (offsets -1, 0, +1 in each direction).
        a1 = shortdata.a1
        a2 = shortdata.a2
        a1 = np.concatenate([a1-1, a1-1, a1-1, a1, a1, a1, a1+1, a1+1, a1+1])
        a2 = np.concatenate([a2-1, a2, a2+1, a2-1, a2, a2+1, a2-1, a2, a2+1])

        # Find values in 0-1 cell +- one point.  The 1e-8 epsilons guard the
        # floating-point comparisons at the window edges.
        ua1 = np.unique(a1)
        ua2 = np.unique(a2)
        a1min = ua1[np.where(np.isclose(ua1, 0.0))[0][0] - 1] - 1e-8
        a1max = ua1[np.where(np.isclose(ua1, 1.0))[0][-1] + 1] + 1e-8
        a2min = ua2[np.where(np.isclose(ua2, 0.0))[0][0] - 1] - 1e-8
        a2max = ua2[np.where(np.isclose(ua2, 1.0))[0][-1] + 1] + 1e-8
        ix = np.where((a1 >= a1min) & (a1 <= a1max) & (a2 >= a2min) & (a2 <= a2max))
        
        # Fit energy: smooth Rbf fit plus a nearest-neighbour fallback.
        E_gsf = np.concatenate([shortdata.E_gsf] * 9)
        self.__E_gsf_fit = Rbf(a1[ix], a2[ix], E_gsf[ix])
        self.__E_gsf_nearest = NearestNDInterpolator(np.array([a1[ix], a2[ix]]).T, E_gsf[ix])
        
        # Fit delta (optional column) the same way.
        if 'delta' in self.data:
            delta = np.concatenate([shortdata.delta] * 9)
            self.__delta_fit = Rbf(a1[ix], a2[ix], delta[ix])
            self.__delta_nearest = NearestNDInterpolator(np.array([a1[ix], a2[ix]]).T, delta[ix])
Esempio n. 3
0
def test_nearest_options():
    # Smoke test: NearestNDInterpolator must accept cKDTree options and
    # produce the same answers with and without them.
    num_points, num_dims = 4, 3
    pts = np.arange(num_points * num_dims).reshape((num_points, num_dims))
    vals = np.arange(num_points)
    interp_default = NearestNDInterpolator(pts, vals)

    tree_opts = {'balanced_tree': False, 'compact_nodes': False}
    interp_custom = NearestNDInterpolator(pts, vals, tree_options=tree_opts)
    assert_allclose(interp_default(pts), interp_custom(pts), atol=1e-14)
Esempio n. 4
0
def test_nearest_list_argument():
    # The interpolator must accept values given as an ndarray or a plain list.
    data = np.array([[0, 0, 0, 0, 1, 0, 1],
                     [0, 0, 0, 0, 0, 1, 1],
                     [0, 0, 0, 0, 1, 1, 2]])
    pts = data[:, 3:]

    # values supplied as an ndarray
    interp = NearestNDInterpolator((pts[0], pts[1]), pts[2])
    assert_array_equal(interp([0.1, 0.9], [0.1, 0.9]), [0, 2])

    # values supplied as a Python list
    interp = NearestNDInterpolator((pts[0], pts[1]), list(pts[2]))
    assert_array_equal(interp([0.1, 0.9], [0.1, 0.9]), [0, 2])
Esempio n. 5
0
def get_shifter_from_centers(centers5,
                             locs,
                             maxshift=8,
                             plot=True,
                             nearest=True):
    """Build per-axis shift interpolators from a grid of measured centres.

    Shifts larger than ``maxshift`` are clipped and NaNs are zeroed before
    the interpolators are constructed.  Returns ``(xcorrec, ycorrec)``.
    """
    # Work on a clipped copy so the caller's array is untouched.
    grid = deepcopy(centers5)
    grid[np.where(grid > maxshift)] = maxshift
    grid[np.where(grid < -maxshift)] = -maxshift
    grid[np.isnan(grid)] = 0

    n_pos = grid.shape[0]
    coords = np.zeros([n_pos * n_pos, 4])
    if plot:
        fig = plt.figure(figsize=(12, 8))
        ax = fig.gca()

    row = 0
    for ix in range(n_pos):
        for iy in range(n_pos):
            coords[row, :] = [
                locs[ix], locs[iy], grid[ix, iy, 0], grid[ix, iy, 1]
            ]
            row += 1
            if plot:
                plt.plot([locs[ix], centers5[ix, iy, 0] + locs[ix]],
                         [locs[iy], centers5[ix, iy, 1] + locs[iy]], 'c')
                plt.plot(centers5[ix, iy, 0] + locs[ix],
                         centers5[ix, iy, 1] + locs[iy], 'rx')

    if plot:
        plt.axis('equal')
        ax.set_xticks(locs)
        ax.set_yticks(locs)
        ax.grid()
        ax.grid(linestyle='-', linewidth='0.5', color='cyan')
        plt.figure()
        plt.imshow(grid[:, :, 0], vmin=-maxshift, vmax=maxshift)
        plt.colorbar()

    # Pick the interpolation family for the two shift fields.
    maker = NearestNDInterpolator if nearest else LinearNDInterpolator
    xcorrec = maker(coords[:, 0:2], coords[:, 2])
    ycorrec = maker(coords[:, 0:2], coords[:, 3])

    return xcorrec, ycorrec
Esempio n. 6
0
def fill_missing(mask, x, y, u, v):
    """Fill masked samples of ``u`` and ``v`` in place from their nearest
    boundary neighbours.

    The interpolators are built from the outer-boundary cells that surround
    the masked region of ``u`` and are evaluated only at the masked
    positions selected by ``mask``.
    """
    edge = find_boundaries(u.mask, mode='outer')
    edge_points = np.stack((y[edge], x[edge])).T
    fill_u = NearestNDInterpolator(edge_points, u[edge], rescale=False)
    fill_v = NearestNDInterpolator(edge_points, v[edge], rescale=False)

    # Only the missing values are replaced; everything else is untouched.
    targets = (y[mask], x[mask])
    u[mask] = fill_u(targets)
    v[mask] = fill_v(targets)
Esempio n. 7
0
def load_iemre():
    """Use IEM Reanalysis for non-precip data

    24km product is smoothed down to the 0.01 degree grid

    Reads the daily IEMRE netCDF for VALID's year and nearest-neighbour
    regrids solar radiation, high/low temperature, dewpoint and wind speed
    onto the 0.01 degree analysis grid, writing into the module-level
    arrays SOLAR, HIGH_TEMP, LOW_TEMP, DEWPOINT and WIND in place.
    Exits the process if the source file is missing.
    """
    printt("load_iemre() called")
    # Target 0.01 degree grid covering the analysis domain.
    xaxis = np.arange(MYWEST, MYEAST, 0.01)
    yaxis = np.arange(MYSOUTH, MYNORTH, 0.01)
    xi, yi = np.meshgrid(xaxis, yaxis)

    fn = iemre.get_daily_ncname(VALID.year)
    if not os.path.isfile(fn):
        printt("Missing %s for load_solar, aborting" % (fn, ))
        sys.exit()
    with ncopen(fn) as nc:
        offset = iemre.daily_offset(VALID)
        lats = nc.variables["lat"][:]
        lons = nc.variables["lon"][:]
        # Expand the 1-D axes to the 2-D source point grid.
        lons, lats = np.meshgrid(lons, lats)

        # Storage is W m-2, we want langleys per day
        data = nc.variables["rsds"][offset, :, :] * 86400.0 / 1000000.0 * 23.9
        # Default to a value of 300 when this data is missing, for some reason
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        # iemre_bounds_check clamps/validates the regridded field.
        SOLAR[:] = iemre_bounds_check("rsds", nn(xi, yi), 0, 1000)

        # High temperature, Kelvin converted to Celsius.
        data = temperature(nc.variables["high_tmpk"][offset, :, :],
                           "K").value("C")
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        HIGH_TEMP[:] = iemre_bounds_check("high_tmpk", nn(xi, yi), -60, 60)

        # Low temperature, Kelvin converted to Celsius.
        data = temperature(nc.variables["low_tmpk"][offset, :, :],
                           "K").value("C")
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        LOW_TEMP[:] = iemre_bounds_check("low_tmpk", nn(xi, yi), -60, 60)

        # Average dewpoint, Kelvin converted to Celsius.
        data = temperature(nc.variables["avg_dwpk"][offset, :, :],
                           "K").value("C")
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        DEWPOINT[:] = iemre_bounds_check("avg_dwpk", nn(xi, yi), -60, 60)

        # Wind speed, used as-is (units per the source file).
        data = nc.variables["wind_speed"][offset, :, :]
        nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                                   np.ravel(data))
        WIND[:] = iemre_bounds_check("wind_speed", nn(xi, yi), 0, 30)
    printt("load_iemre() finished")
Esempio n. 8
0
def load_iemre():
    """Use IEM Reanalysis for non-precip data

    24km product is smoothed down to the 0.01 degree grid

    Older variant: reads the yearly "mw_daily" netCDF directly and fills the
    module-level grids SOLAR, HIGH_TEMP, LOW_TEMP, DEWPOINT and WIND in
    place via nearest-neighbour regridding.  Exits the process if the
    source file is missing.
    """
    printt("load_iemre() called")
    # Target 0.01 degree grid covering the analysis domain.
    xaxis = np.arange(WEST, EAST, 0.01)
    yaxis = np.arange(SOUTH, NORTH, 0.01)
    xi, yi = np.meshgrid(xaxis, yaxis)

    fn = "/mesonet/data/iemre/%s_mw_daily.nc" % (VALID.year,)
    if not os.path.isfile(fn):
        printt("Missing %s for load_solar, aborting" % (fn,))
        sys.exit()
    # NOTE(review): the dataset is closed manually at the end; a context
    # manager would also close it if any of the reads below raise.
    nc = netCDF4.Dataset(fn, 'r')
    offset = iemre.daily_offset(VALID)
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    # Expand the 1-D axes to the 2-D source point grid.
    lons, lats = np.meshgrid(lons, lats)

    # Storage is W m-2, we want langleys per day
    data = nc.variables['rsds'][offset, :, :] * 86400. / 1000000. * 23.9
    # Default to a value of 300 when this data is missing, for some reason
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    SOLAR[:] = iemre_bounds_check('rsds', nn(xi, yi), 0, 1000)

    # High temperature, Kelvin converted to Celsius.
    data = temperature(nc.variables['high_tmpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    HIGH_TEMP[:] = iemre_bounds_check('high_tmpk', nn(xi, yi), -60, 60)

    # Low temperature, Kelvin converted to Celsius.
    data = temperature(nc.variables['low_tmpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    LOW_TEMP[:] = iemre_bounds_check('low_tmpk', nn(xi, yi), -60, 60)

    # Average dewpoint, Kelvin converted to Celsius.
    data = temperature(nc.variables['avg_dwpk'][offset, :, :], 'K').value('C')
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    DEWPOINT[:] = iemre_bounds_check('avg_dwpk', nn(xi, yi), -60, 60)

    # Wind speed, used as-is (units per the source file).
    data = nc.variables['wind_speed'][offset, :, :]
    nn = NearestNDInterpolator((np.ravel(lons), np.ravel(lats)),
                               np.ravel(data))
    WIND[:] = iemre_bounds_check('wind_speed', nn(xi, yi), 0, 30)

    nc.close()
    printt("load_iemre() finished")
Esempio n. 9
0
    def perform_upsampling_labels(self, image, labels):
        """Upsample patch-level labels back onto the full image grid.

        Uses hard nearest-neighbour assignment unless soft segmentation is
        enabled, in which case values are sampled with map_coordinates and
        clipped to the valid pattern range.
        """
        if (self.paras['verbose']>0): print ("Perform upsampling", flush=True)

        step = self.paras['step']
        img_shape = image.shape
        lab_shape = labels.shape

        # Image-pixel coordinates expressed in label-grid units: shift by the
        # window/patch offsets, then divide by the sliding step.
        rows = np.arange(img_shape[0]) - self.paras['window_x'] - self.paras['patch_x']
        cols = np.arange(img_shape[1]) - self.paras['window_y'] - self.paras['patch_y']
        row_grid, col_grid = np.meshgrid(rows, cols, indexing='ij')
        coords = np.vstack(((row_grid / step).flatten(),
                            (col_grid / step).flatten())).T

        if self.paras['soft_segmentation'] is False:
            # Hard labels: nearest-neighbour lookup on the label grid.
            lab_rows, lab_cols = np.meshgrid(np.arange(lab_shape[0]),
                                             np.arange(lab_shape[1]),
                                             indexing='ij')
            label_points = np.vstack([lab_rows.flatten(),
                                      lab_cols.flatten()]).T
            nearest = NearestNDInterpolator(label_points, labels.flatten())
            labels_up = np.reshape(nearest(coords), img_shape)
        else:
            # Soft labels: interpolate then clip to the valid pattern range.
            labels_up = map_coordinates(labels, coords, mode='nearest')
            labels_up = np.reshape(labels_up, (img_shape[0], img_shape[1]))
            labels_up = np.clip(labels_up, 0., self.paras['n_patterns']-1.)

        if self.paras['sort_labels_by_pattern_size'] is True:
            labels_up = self.sort_labels_by_pattern_size(labels_up)

        return labels_up
Esempio n. 10
0
def nearest_neighbor_advection(im, flow):
    """Advect each frame by a flow field using nearest-neighbour sampling.

    Parameters
    ----------
    im : np.ndarray
        Shape: (batch_size, C, H, W); C must be 1.
    flow : np.ndarray
        Shape: (batch_size, 2, H, W) — per-pixel (dx, dy) displacement in
        pixel units.

    Returns
    -------
    np.ndarray
        The advected frames, same shape and dtype as ``im``.
        (Docstring previously claimed ``nd.NDArray``; a plain numpy array
        is what is allocated and returned.)
    """
    predict_frame = np.empty(im.shape, dtype=im.dtype)
    batch_size, channel_num, height, width = im.shape
    assert channel_num == 1
    # Pixel-centre coordinate grid shared by every batch element.
    grid_x, grid_y = np.meshgrid(np.arange(width), np.arange(height))
    interp_grid = np.hstack([grid_x.reshape((-1, 1)), grid_y.reshape((-1, 1))])
    for i in range(batch_size):
        flow_interpolator = NearestNDInterpolator(interp_grid, im[i].ravel())
        # Sample the source frame at the displaced grid positions.
        predict_grid = interp_grid + np.hstack(
            [flow[i][0].reshape((-1, 1)), flow[i][1].reshape((-1, 1))])
        predict_frame[i, 0, ...] = flow_interpolator(predict_grid).reshape(
            (height, width))
    return predict_frame
Esempio n. 11
0
def do_coop(ts):
    """Use COOP solar radiation data

    Pulls station solar-radiation observations for the given day from the
    'coop' database, nearest-neighbour grids them onto the IEMRE grid, and
    stores the result (converted MJ/d -> W m-2) in the 'rsds' analysis.

    Parameters
    ----------
    ts : datetime-like
        The day to process; formatted with strftime for the SQL query.
    """
    pgconn = get_dbconn('coop', user='******')
    cursor = pgconn.cursor()

    cursor.execute("""SELECT ST_x(geom), ST_y(geom),
        coalesce(narr_srad, merra_srad) from alldata a JOIN stations t
        ON (a.station = t.id) WHERE
        day = %s and t.network ~* 'CLIMATE' and substr(id, 3, 1) != 'C'
        and substr(id, 3, 4) != '0000'
    """, (ts.strftime("%Y-%m-%d"), ))
    lons = []
    lats = []
    vals = []
    for row in cursor:
        # Skip stations with missing or negative radiation values.
        if row[2] is None or row[2] < 0:
            continue
        lons.append(row[0])
        lats.append(row[1])
        vals.append(row[2])

    nn = NearestNDInterpolator((np.array(lons), np.array(lats)),
                               np.array(vals))
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)

    ds = iemre.get_grids(ts.date(), varnames='rsds')
    # Convert MJ/d to Wm2
    ds['rsds'].values = nn(xi, yi) * 1000000. / 86400.
    iemre.set_grids(ts.date(), ds)
    # NOTE(review): shell=True with a formatted string; ts is internal data,
    # but an argv list with shell=False would be safer.
    subprocess.call(
        "python db_to_netcdf.py %s" % (ts.strftime("%Y %m %d"), ),
        shell=True)
Esempio n. 12
0
    def _build_vertex_values(self, items, area_func, value_func):
        """
        Interpolate vertices with known altitudes to get altitudes for the
        remaining ones.

        For each item, ``area_func(item).faces`` selects faces whose vertices
        get values from ``value_func``; every vertex not covered by any item
        is then filled from its nearest valued vertex.  Returns an int32
        array aligned with ``self.vertices``.
        """
        vertex_values = np.empty(self.vertices.shape[:1], dtype=np.int32)
        if not vertex_values.size:
            return vertex_values
        # ``np.bool`` was deprecated in NumPy 1.20 and removed in 1.24; the
        # builtin ``bool`` is the documented replacement.
        vertex_value_mask = np.full(self.vertices.shape[:1], fill_value=False, dtype=bool)

        for item in items:
            faces = area_func(item).faces
            if not faces:
                continue
            # All vertex indices touched by this item's faces.
            i_vertices = np.unique(self.faces[np.array(tuple(chain(*faces)))].flatten())
            vertex_values[i_vertices] = value_func(item, i_vertices)
            vertex_value_mask[i_vertices] = True

        # Fill the unvalued vertices from their nearest valued neighbour,
        # but only when there is at least one known and one unknown vertex.
        if np.any(vertex_value_mask) and not np.all(vertex_value_mask):
            interpolate = NearestNDInterpolator(self.vertices[vertex_value_mask],
                                                vertex_values[vertex_value_mask])
            vertex_values[np.logical_not(vertex_value_mask)] = interpolate(
                *np.transpose(self.vertices[np.logical_not(vertex_value_mask)])
            )

        return vertex_values
Esempio n. 13
0
def interpolate_timeseries(project, data):
    """Build a nearest-neighbour interpolator over (x, y, time).

    The sample points are the spm_resistor throat centroids of ``project``,
    repeated once per time step of ``data``; querying the returned
    interpolator at (x, y, t) yields the nearest recorded value.
    """
    im_soft = np.load(os.path.join(ecm.INPUT_DIR, 'im_soft.npz'))['arr_0']
    x_len, y_len = im_soft.shape
    net = project.network
    res_Ts = net.throats('spm_resistor')
    sorted_res_Ts = net['throat.spm_resistor_order'][res_Ts].argsort()
    res_pores = net['pore.coords'][net['throat.conns'][res_Ts[sorted_res_Ts]]]
    res_Ts_coords = np.mean(res_pores, axis=1)
    x = res_Ts_coords[:, 0]
    y = res_Ts_coords[:, 1]
    # Pair each copy of the spatial coordinates with its time index.
    xs, ys, ts, ds = [], [], [], []
    for step in range(data.shape[0]):
        xs.append(x)
        ys.append(y)
        ts.append(np.ones(len(x)) * step)
        ds.append(data[step, :])
    points = np.vstack((np.concatenate(xs),
                        np.concatenate(ys),
                        np.concatenate(ts))).T
    return NearestNDInterpolator(points, np.concatenate(ds))
Esempio n. 14
0
def interpolate_dataset(
    points: NDArray[(2, Any), float],
    values: NDArray[(Any, Any), float],
    target_points: NDArray[(2, Any), float],
    method: str,
) -> NDArray[(Any,), float]:

    """Interpolate field values from one grid onto another.
    ------------------------------
    Args:
        points (np.ndarray): ordered pairs of coordinates
            from current grid
        values (np.ndarray): field values at points
        target_points (np.ndarray): ordered pairs of coordinates
            from target grid
        method (str): desired method - can be either 'linear' or
            'nearest'
    Returns:
        (np.ndarray): newly predicted values at target points
    Raises:
        ValueError: if ``method`` is neither 'linear' nor 'nearest'.
    """

    # Map to the interpolator *class* first so only the requested
    # interpolator is constructed.  The previous code instantiated both,
    # paying LinearNDInterpolator's Delaunay-triangulation cost even when
    # only the nearest-neighbour result was wanted.
    interpolator_classes = {
        "linear": LinearNDInterpolator,
        "nearest": NearestNDInterpolator,
    }
    if method not in interpolator_classes:
        raise ValueError("Method must be linear or nearest.")

    f = interpolator_classes[method](points, values)

    return f(target_points).T
def create_climate_geoGrid_interpolator_from_json_file(
        path_to_latlon_to_rowcol_file, worldGeodeticSys84, geoTargetGrid,
        cdict):
    """Create an interpolator from a JSON list of lat/lon -> row/col mappings.

    Each mapping is projected into the target grid CRS; ``cdict`` is filled
    in place with (row, col) -> (lat, lon).  Returns a nearest-neighbour
    interpolator from projected coordinates to (row, col).
    """
    with open(path_to_latlon_to_rowcol_file) as _:
        points = []
        values = []

        transformer = Transformer.from_crs(worldGeodeticSys84,
                                           geoTargetGrid,
                                           always_xy=True)

        for latlon, rowcol in json.load(_):
            row, col = rowcol
            clat, clon = latlon
            try:
                cr_geoTargetGrid, ch_geoTargetGrid = transformer.transform(
                    clon, clat)
                cdict[(row, col)] = (round(clat, 4), round(clon, 4))
                points.append([cr_geoTargetGrid, ch_geoTargetGrid])
                values.append((row, col))
            # Best-effort: skip points that fail to project, but do not
            # swallow KeyboardInterrupt/SystemExit as the previous bare
            # ``except:`` did.
            except Exception:
                continue

        return NearestNDInterpolator(np.array(points), np.array(values))
Esempio n. 16
0
    def __init__(self, scenario: model.Scenario):
        """Initialize the model.

        Encodes the scenario as the event tuple used by the pre-computed
        lookup table, then nearest-neighbour interpolates the stored
        predictions for that event.
        """
        super().__init__(scenario)

        s = self._scenario

        # One-hot flags for the faulting mechanism (reverse / strike-slip /
        # normal); all remain zero for any other mechanism value.
        flag_rs = flag_ss = flag_ns = 0
        if s.mechanism == 'SS':
            flag_ss = 1
        elif s.mechanism == 'NS':
            flag_ns = 1
        elif s.mechanism == 'RS':
            flag_rs = 1

        # Feature order must match the 'events' table in fname_data.
        event = (s.mag, s.depth_hyp, flag_rs, flag_ss, flag_ns, s.dist_jb,
                 s.v_s30)

        # Build the module-level interpolator lazily, once, and reuse it for
        # every subsequent instance.
        global INTERPOLATOR
        if INTERPOLATOR is None:
            with np.load(fname_data) as data:
                INTERPOLATOR = NearestNDInterpolator(data['events'],
                                                     data['predictions'])
        prediction = INTERPOLATOR(event)
        # Predictions interleave two series: even indices are ln-response,
        # odd indices are presumably variances (sqrt taken to get the
        # standard deviation) — TODO confirm against the data file.
        self._ln_resp = prediction[0::2]
        self._ln_std = np.sqrt(prediction[1::2])
Esempio n. 17
0
def make_nearest_interpolator_unstructured(field, grid=None):
    '''Make a nearest interpolator for an unstructured grid.

	Parameters
	----------
	field : Field or array_like
		The field to interpolate.
	grid : Grid or None
		The grid of the field. If it is given, the grid of `field` is replaced by this grid.
	
	Returns
	-------
	Field generator
		The interpolator as a Field generator. The grid on which this field generator will be evaluated does
		not need to have any structure.
	'''
    if grid is None:
        grid = field.grid
    else:
        field = Field(field, grid)

    # Fix: the previous code passed an undefined name ``fill_value`` as a
    # third argument — a NameError at call time, and NearestNDInterpolator
    # accepts no fill_value parameter anyway (its third parameter is
    # ``rescale``).
    interp = NearestNDInterpolator(grid.points, field)

    def interpolator(evaluated_grid):
        # Fix: evaluate at the points of the grid being queried; the old
        # code re-evaluated at the *source* grid points, so the result never
        # depended on ``evaluated_grid``.
        res = interp(evaluated_grid.points)
        return Field(res, evaluated_grid)

    return interpolator
Esempio n. 18
0
def copy_to_iemre(valid):
    """verbatim copy over to IEMRE.

    Reads the hourly stage4 precipitation grid for `valid`, regrids it onto
    the IEMRE grid with nearest-neighbour sampling, zeroes out-of-range
    values, and writes the result into the hourly IEMRE netCDF file.
    """
    tidx = iemre.hourly_offset(valid)
    nc = ncopen(("/mesonet/data/stage4/%s_stage4_hourly.nc") % (valid.year, ),
                'a',
                timeout=300)
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    val = nc.variables['p01m'][tidx]
    nc.close()

    # Our data is 4km, iemre is 0.125deg, so we stride some to cut down on mem
    stride = slice(None, None, 3)
    lats = np.ravel(lats[stride, stride])
    lons = np.ravel(lons[stride, stride])
    vals = np.ravel(val[stride, stride])
    nn = NearestNDInterpolator((lons, lats), vals)
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)
    res = nn(xi, yi)

    # Lets clip bad data
    # 10 inches per hour is bad data
    res = np.where(np.logical_or(res < 0, res > 250), 0., res)

    # Open up our RE file
    nc = ncopen(iemre.get_hourly_ncname(valid.year), 'a', timeout=300)
    nc.variables["p01m"][tidx, :, :] = res
    LOG.debug("wrote data to hourly IEMRE min: %.2f avg: %.2f max: %.2f",
              np.min(res), np.mean(res), np.max(res))
    nc.close()
Esempio n. 19
0
def merge(valid):
    """
    Process an hour's worth of stage4 data into the hourly RE

    Subsamples the stage4 precipitation grid, nearest-neighbour regrids it
    onto the IEMRE grid, zeroes out-of-range values, and writes the result
    into the hourly IEMRE netCDF for the year of `valid`.
    """
    nc = netCDF4.Dataset(
        ("/mesonet/data/stage4/%s_stage4_hourly.nc") % (valid.year, ), 'r')
    tidx = iemre.hourly_offset(valid)
    val = nc.variables['p01m'][tidx, :, :]
    # print("stage4 mean: %.2f max: %.2f" % (np.mean(val), np.max(val)))
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]

    # Rough subsample, since the whole enchillata is too much
    # NOTE(review): the source nc handle is never closed before being
    # rebound below — confirm whether that leak matters here.
    lats = np.ravel(lats[200:-100:5, 300:900:5])
    lons = np.ravel(lons[200:-100:5, 300:900:5])
    vals = np.ravel(val[200:-100:5, 300:900:5])
    nn = NearestNDInterpolator((lons, lats), vals)
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)
    res = nn(xi, yi)

    # Lets clip bad data
    # 10 inches per hour is bad data
    res = np.where(np.logical_or(res < 0, res > 250), 0., res)
    # print("Resulting mean: %.2f max: %.2f" % (np.mean(res), np.max(res)))

    # Open up our RE file
    nc = netCDF4.Dataset(
        "/mesonet/data/iemre/%s_mw_hourly.nc" % (valid.year, ), 'a')
    nc.variables["p01m"][tidx, :, :] = res
    # print(("Readback mean: %.2f max: %.2f"
    #       ) % (np.mean(nc.variables["p01m"][tidx, :, :]),
    #            np.max(nc.variables["p01m"][tidx, :, :])))
    nc.close()
Esempio n. 20
0
def do_coop(ts):
    """Use COOP solar radiation data

    Older variant: pulls station solar-radiation observations for the given
    day, nearest-neighbour grids them onto the IEMRE grid, and writes the
    converted values (MJ/d -> W m-2) directly into the yearly netCDF file.
    """
    pgconn = psycopg2.connect(database='coop', host='iemdb', user='******')
    cursor = pgconn.cursor()

    cursor.execute(
        """SELECT ST_x(geom), ST_y(geom),
        coalesce(narr_srad, merra_srad) from alldata a JOIN stations t
        ON (a.station = t.id) WHERE
        day = %s and t.network ~* 'CLIMATE' and substr(id, 3, 1) != 'C'
        and substr(id, 3, 4) != '0000'
    """, (ts.strftime("%Y-%m-%d"), ))
    lons = []
    lats = []
    vals = []
    for row in cursor:
        # Skip stations with missing or negative radiation values.
        if row[2] is None or row[2] < 0:
            continue
        lons.append(row[0])
        lats.append(row[1])
        vals.append(row[2])

    nn = NearestNDInterpolator((np.array(lons), np.array(lats)),
                               np.array(vals))
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)

    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_daily.nc" % (ts.year, ),
                         'a')
    offset = iemre.daily_offset(ts)
    # Data above is MJ / d / m-2, we want W / m-2
    nc.variables['rsds'][offset, :, :] = nn(xi, yi) * 1000000. / 86400.
    nc.close()
Esempio n. 21
0
	def checklabels(self,inlabels,**kwargs):
		"""Determine the nearest C3K grid labels to an array of input labels.

		Useful to run before actually selecting spectra.

		Parameters
		----------
		inlabels : iterable of sequences
			Each entry is (teff, logg, [Fe/H], [alpha/Fe]).
			NOTE(review): the (teff, logg) pair is matched against the
			C3K 'logt'/'logg' grid below, so li[0] is presumably
			log10(Teff) — confirm with callers.

		Returns
		-------
		np.ndarray
			The full label set of the nearest C3K spectrum for each input.
		"""
		# a function that allows the user to determine the nearest C3K labels to array on input labels
		# useful to run before actually selecting spectra

		labels = []

		for li in inlabels:
			# select the C3K spectra at that [Fe/H] and [alpha/Fe]
			teff_i  = li[0]
			logg_i  = li[1]
			FeH_i   = li[2]
			alpha_i = li[3]

			# find nearest value to FeH and aFe
			FeH_i   = self.FeHarr[np.argmin(np.abs(self.FeHarr-FeH_i))]
			alpha_i = self.alphaarr[np.argmin(np.abs(self.alphaarr-alpha_i))]

			# select the C3K spectra for these alpha and FeH
			C3K_i = self.C3K[alpha_i][FeH_i]

			# create array of all labels in specific C3K file
			C3Kpars = np.array(C3K_i['parameters'])

			# do a nearest neighbor interpolation on Teff and log(g) in the C3K grid
			# (interpolating over row indices yields the index of the nearest grid point)
			C3KNN = NearestNDInterpolator(
				np.array([C3Kpars['logt'],C3Kpars['logg']]).T,range(0,len(C3Kpars))
				)((teff_i,logg_i))

			# determine the labels for the selected C3K spectrum
			label_i = list(C3Kpars[C3KNN])		
			labels.append(label_i)

		return np.array(labels)
Esempio n. 22
0
    def __init__(self, source, target, parameters):
        """Load the orientation map and annotate target neurons with it.

        Builds a nearest-neighbour interpolator over the pickled map,
        samples it at the source positions (scaled by pi), and stores a
        per-neuron 'ORMapOrientation' annotation on the target population.
        """
        import pickle
        ModularConnectorFunction.__init__(self, source, target, parameters)
        t_size = target.size_in_degrees()
        # Fix: pickle files must be opened in binary mode ('rb'); text mode
        # fails under Python 3.  The context manager also closes the handle,
        # which the previous code never did.
        with open(self.parameters.or_map_location, 'rb') as f:
            mmap = pickle.load(f)
        coords_x = numpy.linspace(-t_size[0] / 2.0, t_size[0] / 2.0,
                                  numpy.shape(mmap)[0])
        coords_y = numpy.linspace(-t_size[1] / 2.0, t_size[1] / 2.0,
                                  numpy.shape(mmap)[1])
        X, Y = numpy.meshgrid(coords_x, coords_y)
        # Fix: zip() is an iterator on Python 3 and cannot be interpreted as
        # a point array by NearestNDInterpolator; build the (N, 2) array
        # explicitly.
        self.mmap = NearestNDInterpolator(
            numpy.column_stack((X.flatten(), Y.flatten())),
            mmap.flatten())
        self.or_source = self.mmap(
            numpy.transpose(
                numpy.array([
                    self.source.pop.positions[0], self.source.pop.positions[1]
                ]))) * numpy.pi

        for (index, neuron2) in enumerate(target.pop.all()):
            val_target = self.mmap(self.target.pop.positions[0][index],
                                   self.target.pop.positions[1][index])
            self.target.add_neuron_annotation(index,
                                              'ORMapOrientation',
                                              val_target * numpy.pi,
                                              protected=False)
Esempio n. 23
0
def generic_gridder(nc, cursor, idx):
    """
    Generic gridding algorithm for easy variables

    Collects observations for column ``idx`` from known stations, then
    nearest-neighbour grids them onto the netCDF lon/lat axes.  Returns the
    grid, or None when fewer than four observations are available.
    """
    lats, lons, vals = [], [], []
    for row in cursor:
        if row[idx] is not None and row["station"] in NT.sts:
            lats.append(NT.sts[row["station"]]["lat"])
            lons.append(NT.sts[row["station"]]["lon"])
            vals.append(row[idx])
    if len(vals) < 4:
        print(
            ("Only %s observations found for %s, won't grid")
            % (len(vals), idx)
        )
        return None

    xi, yi = np.meshgrid(nc.variables["lon"][:], nc.variables["lat"][:])
    nn = NearestNDInterpolator((lons, lats), np.array(vals))
    grid = nn(xi, yi)
    print(
        ("%s %s %.3f %.3f")
        % (cursor.rowcount, idx, np.max(grid), np.min(grid))
    )
    # nn(...) always yields an ndarray, so no None check is needed here.
    return grid
Esempio n. 24
0
def merge(nc, valid, gribname, vname):
    """Merge in the grib data

    Walks every message of the daily CFS grib file, regrids each field onto
    the IEMRE grid with nearest-neighbour sampling, and aggregates it into
    ``nc.variables[vname]`` at the appropriate daily time step using
    AGGFUNC.  Exits the process if the grib file is missing.
    """
    fn = valid.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/model/cfs/%H/" +
                         gribname + ".01.%Y%m%d%H.daily.grib2"))
    if not os.path.isfile(fn):
        print("cfs2iemre missing %s, abort" % (fn, ))
        sys.exit()
    grbs = pygrib.open(fn)
    lats = None
    lons = None
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)
    for grib in tqdm(grbs,
                     total=grbs.messages,
                     desc=vname,
                     disable=not sys.stdout.isatty()):
        ftime = valid + datetime.timedelta(hours=grib.forecastTime)
        # move us safely back to get into the proper date
        cst = ftime - datetime.timedelta(hours=7)
        if cst.year != valid.year:
            continue
        # The lat/lon grid is identical for every message; fetch it once.
        if lats is None:
            lats, lons = grib.latlons()
        vals = grib.values
        nn = NearestNDInterpolator((lons.flat, lats.flat), vals.flat)
        vals = nn(xi, yi)
        tstep = iemre.daily_offset(cst.date())
        current = nc.variables[vname][tstep, :, :]
        # Seed a fully-masked slice with the variable's default before
        # aggregating new values into it.
        if current.mask.all():
            current[:, :] = DEFAULTS[vname]
        nc.variables[vname][tstep, :, :] = AGGFUNC[vname](current, vals)
Esempio n. 25
0
def test_contour_linear_ring():
    """Test contourf with a section that only has 3 points."""
    ax = plt.axes([0.01, 0.05, 0.898, 0.85],
                  projection=ccrs.Mercator(),
                  aspect='equal')
    ax.set_extent([-99.6, -89.0, 39.8, 45.5])

    # Convert the axes corners back to geodetic coordinates so a lon/lat
    # sampling grid covering the visible extent can be built.
    xbnds = ax.get_xlim()
    ybnds = ax.get_ylim()
    ll = ccrs.Geodetic().transform_point(xbnds[0], ybnds[0], ax.projection)
    ul = ccrs.Geodetic().transform_point(xbnds[0], ybnds[1], ax.projection)
    ur = ccrs.Geodetic().transform_point(xbnds[1], ybnds[1], ax.projection)
    lr = ccrs.Geodetic().transform_point(xbnds[1], ybnds[0], ax.projection)
    xi = np.linspace(min(ll[0], ul[0]), max(lr[0], ur[0]), 100)
    yi = np.linspace(min(ll[1], ul[1]), max(ul[1], ur[1]), 100)
    xi, yi = np.meshgrid(xi, yi)
    # Synthetic diagonal field from a handful of nearest-neighbour seeds.
    nn = NearestNDInterpolator((np.arange(-94, -85), np.arange(36, 45)),
                               np.arange(9))
    vals = nn(xi, yi)
    lons = xi
    lats = yi
    # Smooth the stair-stepped field with a 6x6 box filter.
    window = np.ones((6, 6))
    vals = convolve2d(vals,
                      window / window.sum(),
                      mode='same',
                      boundary='symm')
    ax.contourf(lons, lats, vals, np.arange(9), transform=ccrs.PlateCarree())

    plt.draw()
    def create_ascii_grid_interpolator(arr, meta, ignore_nodata=True):
        """Read an ascii grid into a nearest-neighbour interpolator, skipping
        no-data cells when ``ignore_nodata`` is set."""

        n_rows, n_cols = arr.shape

        cellsize = int(meta["cellsize"])
        xll = int(meta["xllcorner"])
        yll = int(meta["yllcorner"])
        nodata_value = meta["nodata_value"]

        # Cell-centre coordinates of the lower-left cell and of the
        # upper-left cell (ascii grids store row 0 at the top).
        xll_center = xll + cellsize // 2
        yll_center = yll + cellsize // 2
        yul_center = yll_center + (n_rows - 1) * cellsize

        points = []
        values = []

        for r_idx in range(n_rows):
            for c_idx in range(n_cols):
                cell = arr[r_idx, c_idx]
                if ignore_nodata and cell == nodata_value:
                    continue
                points.append([xll_center + c_idx * cellsize,
                               yul_center - r_idx * cellsize])
                values.append(cell)

        return NearestNDInterpolator(np.array(points), np.array(values))
Esempio n. 27
0
def try_merra(ts):
    """Attempt to use MERRA data.

    Totals the shortwave-down radiation for the 6z-to-6z window spanning
    `ts`, averages it to W m-2, regrids it onto the IEMRE grid, and stores
    it as the 'rsds' analysis.  Returns False when either MERRA file is
    missing, True on success.
    """
    # Our files are UTC date based :/
    ncfn1 = ts.strftime("/mesonet/merra2/%Y/%Y%m%d.nc")
    ncfn2 = (
        ts + datetime.timedelta(days=1)
    ).strftime("/mesonet/merra2/%Y/%Y%m%d.nc")
    if not os.path.isfile(ncfn1) or not os.path.isfile(ncfn2):
        return False
    with ncopen(ncfn1) as nc:
        # Total up from 6z to end of file for today
        total = np.sum(nc.variables['SWGDN'][5:, :, :], axis=0)
    with ncopen(ncfn2) as nc:
        lat1d = nc.variables['lat'][:]
        lon1d = nc.variables['lon'][:]
        # Total up to 6z
        total += np.sum(nc.variables['SWGDN'][:6, :, :], axis=0)

    # We wanna store as W m-2, so we just average out the data by hour
    total = total / 24.0

    lons, lats = np.meshgrid(lon1d, lat1d)
    nn = NearestNDInterpolator(
        (lons.flatten(), lats.flatten()), total.flatten()
    )
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)

    ds = iemre.get_grids(ts.date(), varnames='rsds')
    ds['rsds'].values = nn(xi, yi)
    iemre.set_grids(ts.date(), ds)
    # NOTE(review): shell=True with a formatted string; ts is internal data,
    # but an argv list with shell=False would be safer.
    subprocess.call(
        "python db_to_netcdf.py %s" % (ts.strftime("%Y %m %d"), ),
        shell=True)

    return True
Esempio n. 28
0
def generic_gridder(df, idx):
    """
    Generic gridding algorithm for easy variables

    First pass nulls out local outliers (|zscore| > 1.5 within 2x2 degree
    windows) directly in the caller's dataframe; second pass grids the
    surviving observations with nearest-neighbour interpolation onto the
    IEMRE grid.
    """
    window = 2.0
    nonnull = df[df[idx].notnull()]
    for lat in np.arange(iemre.SOUTH, iemre.NORTH, window):
        for lon in np.arange(iemre.WEST, iemre.EAST, window):
            west, east, south, north = lon, lon + window, lat, lat + window
            box = nonnull[(nonnull['lat'] >= south) &
                          (nonnull['lat'] < north) &
                          (nonnull['lon'] >= west) &
                          (nonnull['lon'] < east)]
            # Need a few observations for a meaningful zscore.
            if len(box.index) < 4:
                continue
            scores = np.abs(zscore(box[idx]))
            outliers = box[scores > 1.5]
            df.loc[outliers.index, idx] = np.nan

    good = df[df[idx].notnull()]

    nn = NearestNDInterpolator((np.array(good['lon']), np.array(good['lat'])),
                               np.array(good[idx]))
    xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)
    grid = nn(xi, yi)

    return grid
Esempio n. 29
0
    def InterpolateOntoCartesian(X, Y, Z, RHO, newX, newY, newZ):
        """Interpolate density RHO from scattered (X, Y, Z) samples onto the
        cartesian grid (newX, newY, newZ), processing the target points in
        parallel chunks and zeroing points outside the original max radius.
        """
        from joblib import Parallel, delayed
        from scipy.spatial import Delaunay
        from scipy.interpolate import NearestNDInterpolator

        radialCoords = np.stack((X, Y, Z)).T
        newCoords = np.stack((newX.ravel(), newY.ravel(), newZ.ravel())).T
        # Split the targets so chunks can be interpolated in parallel.
        newCoordsChunks = np.array_split(newCoords,
                                         indices_or_sections=100,
                                         axis=0)

        st.write('Preparing interpolation grid...')
        tri = Delaunay(radialCoords,
                       )  #qhull_options='Qbb Qc Qz Q0 Q3 Q5 Q8 C0 C-0')
        # NOTE(review): NearestNDInterpolator documents a point-array first
        # argument; passing a Delaunay triangulation may not be supported by
        # current scipy — confirm against the scipy version in use.
        interpolator = NearestNDInterpolator(tri, RHO.ravel())

        st.write('Interpolating...')

        def DoOneChunk(i, chunk):
            # Interpolate one chunk of target points.
            print(f'Chunk {i} of {len(newCoordsChunks)}')
            RHOinterpolated = interpolator(chunk)
            RHOinterpolated = np.nan_to_num(RHOinterpolated)
            # Zero out cartesian coordinates that are outside the original radius.
            dist = np.sqrt(chunk[:, 0]**2 + chunk[:, 1]**2 + chunk[:, 2]**2)
            RHOinterpolated[dist > np.max(X)] = 0
            return RHOinterpolated

        results = Parallel(n_jobs=5, verbose=100, pre_dispatch=10)(
            delayed(DoOneChunk)(i, chunk)
            for i, chunk in enumerate(newCoordsChunks))
        return np.concatenate(results)
Esempio n. 30
0
def area(base_folder):
    """
    Parameters
    ----------
    base_folder : TYPE
        Top level folder of the RSM Suite

    Returns
    -------
    Projected Area

    NOTE(review): no ``return`` statement is visible in this block — the
    computed ``project_area`` is only printed.  The function may be
    truncated here; confirm whether a ``return project_area`` follows.
    """
    values=[]
    areafile = (base_folder+os.sep+"Outputs/Projected_Area/Aout.dat")
    # First pass: count non-empty lines so the values array can be presized.
    fcount = open(areafile,'r')
    line_count =0
    for i in fcount:
        if i != "\n":
            line_count += 1
    fcount.close()
    
    # Second pass: parse the numeric columns of each line; non-numeric
    # tokens are silently skipped.
    f = open(areafile,'r')
    j=0
    for line in f: 
        
        data = line.split()
        floats = []
        for elem in data:
            try:
                floats.append(float(elem))
            except ValueError:
                pass
        # Allocate storage on the first row, once the column count is known.
        if j == 0:
            columns = len(floats)
            values = np.zeros([line_count,columns])
        values[j,:] = np.array(floats)
        j+= 1

    f.close()
     

    # Column 0 is the projected area; the remaining columns are the
    # interpolation coordinates (attitude/rotation inputs).
    points = values[:,1:]
    areavalues = np.array(values[:,0])
   
    
    
    csvfile = (base_folder+os.sep+"Inputs/Model_Evaluation_Inputs/Model_Input_Data.csv")
    inp = np.loadtxt(csvfile,delimiter=',',skiprows=1)
    

    # Assemble the requested (yaw, pitch, rotations...) query points.
    yawreq = np.reshape(inp[:,3],(len(inp),1))
    pitchreq = np.reshape(inp[:,4],(len(inp),1))
    rotreq = inp[:,12:]
    request = np.hstack((yawreq,pitchreq,rotreq))
    
    
    interp=NearestNDInterpolator(points, areavalues)
    
    project_area = interp(request)
    print(project_area)