def calc_constriction_site(pdbid: str):
    """Locate the constriction site: the closest pair of atoms between chains uL4 and uL22.
    The site is assumed to be unique, so the minimum of the KDTree distances identifies it."""
    pdbid = pdbid.upper()
    chainL22: str = log.get_struct(pdbid)['uL22'].values[0]
    chainL4: str = log.get_struct(pdbid)['uL4'].values[0]
    if ',' in chainL22:
        chainL22 = chainL22.split(',')[0]
    if ',' in chainL4:
        chainL4 = chainL4.split(',')[0]

    struct = fetchStructure(pdbid)[0]
    L4: Chain = struct[chainL4]
    L22: Chain = struct[chainL22]
    l4res = [*L4.get_atoms()]
    l22res = [*L22.get_atoms()]
    kdtreeonl4 = KDTree([*map(lambda x: x.get_coord(), l4res)])
    kdtreeonl22 = KDTree([*map(lambda x: x.get_coord(), l22res)])

    # Indices (into l4res / l22res) of the mutually closest atoms.
    nbridin22: int = None
    nbridin4: int = None

    # Nearest atom in L4 for every atom in L22; keep the overall minimum.
    distances_indexes = kdtreeonl4.query([*map(lambda x: x.get_coord(), l22res)])
    dist = 99999999
    for d, idx in zip([*distances_indexes[0]], [*distances_indexes[1]]):
        if d < dist:
            dist = d
            nbridin4 = idx

    # Same in the other direction: nearest atom in L22 for every atom in L4.
    distances_indexes = kdtreeonl22.query([*map(lambda x: x.get_coord(), l4res)])
    dist = 99999999
    for d, idx in zip([*distances_indexes[0]], [*distances_indexes[1]]):
        if d < dist:
            dist = d
            nbridin22 = idx

    l4atomcord = l4res[nbridin4].get_coord()
    l22atomcord = l22res[nbridin22].get_coord()
    centerline = np.mean([l4atomcord, l22atomcord], axis=0)

    residueInL4: Residue = l4res[nbridin4].get_parent()
    residueInL22: Residue = l22res[nbridin22].get_parent()
    print("""{} {} in chain {}({}) is closest to {} {} in chain {}({}).\n Centerline: {}""".format(
        residueInL22.get_resname(), residueInL22.get_id()[1], chainL22, 'uL22',
        residueInL4.get_resname(), residueInL4.get_id()[1], chainL4, 'uL4', centerline))
    return {"uL22": residueInL22, "uL4": residueInL4, "centerline": centerline}
def estimatemeans(self):
    # Grid cells of the 2-D histogram that actually contain points.
    feature_positions = []
    for i in range(len(self.edges[0]) - 1):
        for j in range(len(self.edges[1]) - 1):
            if len(self.points[i, j]) > 0:
                feature_positions.append([i, j])
    tree = KDTree(feature_positions)
    for i in range(len(self.edges[0]) - 1):
        for j in range(len(self.edges[1]) - 1):
            if len(self.points[i, j]) == 0:
                # Distance to the nearest occupied cell; `feature_resolution`
                # is assumed to be defined at module level.
                radius, neighbour = tree.query(x=np.array([i, j]), k=1)
                if radius > feature_resolution[0] / 10:
                    self.estmeans[i, j] = np.nan
                    self.means[i, j] = np.nan
                else:
                    # Average the true means of all occupied cells within that radius.
                    nearby = tree.data[tree.query_ball_point(x=np.array([i, j]), r=radius)]
                    estmean = np.nanmean([self.trumeans[p.astype(int)] for p in nearby])
                    self.estmeans[i, j] = estmean
                    self.means[i, j] = estmean
            else:
                self.means[i, j] = self.trumeans[i, j]
def get_nearest(lasfile):
    """Return the 5-nearest-neighbour KDTree query for point 100 of the file."""
    dataset = np.vstack([lasfile.x, lasfile.y, lasfile.z]).transpose()
    tree = KDTree(dataset)
    query = tree.query(dataset[100], k=5)
    return query
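# A minimal, self-contained sketch of the query pattern in get_nearest, using
# random points in place of a lasfile (the .x/.y/.z accessors above are laspy's).
import numpy as np
from scipy.spatial import KDTree

pts = np.random.rand(1000, 3)            # stand-in for the x/y/z columns
tree = KDTree(pts)
dists, idxs = tree.query(pts[100], k=5)  # 5 nearest neighbours of point 100
# dists[0] == 0.0: the query point is its own nearest neighbour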
def get_interpolation_weights(self, source_lons=None, source_lats=None,
                              dest_lons=None, dest_lats=None, nneighbours=4):
    """
    Get the interpolation matrix M (nx*ny, nneighbours) and neighbour indices G:
    DEST_FIELD = M * SOURCE_FIELD.flatten()[G]
    """
    source_lons_1d, source_lats_1d = source_lons.flatten(), source_lats.flatten()
    [x, y, z] = lat_lon.lon_lat_to_cartesian_normalized(source_lons_1d, source_lats_1d)
    # zip() returns an iterator in Python 3; KDTree needs a sequence.
    kdtree = KDTree(list(zip(x, y, z)))
    [xi, yi, zi] = lat_lon.lon_lat_to_cartesian_normalized(dest_lons.flatten(), dest_lats.flatten())
    [distances, indices] = kdtree.query(list(zip(xi, yi, zi)), k=nneighbours)

    if len(distances.shape) == 2:
        # Inverse-distance-squared weights, normalised to sum to 1 per destination point.
        weights = 1.0 / distances ** 2
        norm = weights.sum(axis=1)
        norm = np.array([norm] * nneighbours).transpose()
        weights /= norm
    else:
        # k == 1: plain nearest neighbour, unit weights.
        weights = np.ones(distances.shape)
    return weights, indices
def compute_graph(samples, weights=None, p_norm=2, max_degree=10,
                  max_distance=INF, approximate_eps=0.):
    vertices = list(range(len(samples)))
    edges = set()
    if not vertices:
        return vertices, edges
    if weights is None:
        weights = np.ones(len(samples[0]))
    embed_fn = get_embed_fn(weights)
    embedded = list(map(embed_fn, samples))
    kd_tree = KDTree(embedded)
    for v1 in vertices:
        # TODO: could dynamically compute distances
        distances, neighbors = kd_tree.query(embedded[v1], k=max_degree + 1,
                                             eps=approximate_eps, p=p_norm,
                                             distance_upper_bound=max_distance)
        for d, v2 in zip(distances, neighbors):
            if (d < max_distance) and (v1 != v2):
                edges.update([(v1, v2), (v2, v1)])
    # print(time.time() - start_time, len(edges), float(len(edges))/len(samples))
    return vertices, edges
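# Hypothetical usage sketch for compute_graph. get_embed_fn and INF are assumed
# to exist elsewhere in this codebase; the stand-ins below only illustrate one
# plausible shape (embedding = element-wise scaling by the weights, INF = np.inf).
# In a real module they must precede the def, since INF is read when the
# signature's default arguments are evaluated.
import numpy as np

INF = np.inf                              # assumed module-level constant
def get_embed_fn(weights):                # assumed helper, minimal stand-in
    return lambda q: np.asarray(q) * weights

samples = [np.random.rand(2) for _ in range(50)]
vertices, edges = compute_graph(samples, max_degree=5)
# edges holds both (v1, v2) and (v2, v1), so the roadmap graph is undirected.
print(len(vertices), len(edges))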
class NeighborsFinder:
    def __init__(self, data):
        """Find neighbors.

        Args:
            data (pd.DataFrame): data with at least the columns id and pos,
                and optionally range
        """
        if data is not None:
            self.data = data

            # Safety checks
            assert isinstance(data, pd.DataFrame)
            assert "pos" in data.columns

            # Initialize KDTree finder from scipy
            self.tree = KDTree(self.data["pos"].tolist())

    def find_in_range(self, obj, search_range):
        # TODO
        # Will not work with double colliders
        # Could be made using colliders in PyGame?

        # Safety check
        assert hasattr(self, "tree")

        # Get position of the query object
        pos = obj.pos

        # Find neighbors in range; the query object itself is expected not to
        # be part of the dataset (the assert below enforces that).
        idx = self.tree.query_ball_point(pos, search_range)

        # Return filtered data
        ids = self.data.iloc[idx].index.tolist()
        assert obj.id not in ids
        return ids

    def find_closest(self, obj, k=1):
        # Safety check
        assert hasattr(self, "tree")

        # Get object position from which we want to find neighbors
        pos = obj.pos

        # Query the k nearest entries of the dataset
        distances, idx = self.tree.query(pos, k=k)
        if k == 1:
            distances = [distances]
            idx = [idx]

        # Get ids from dataset; the assert guards against the query object
        # overlapping an entry of the dataset.
        ids = self.data.iloc[idx].index.tolist()
        assert obj.id not in ids
        return distances, ids
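# Hedged usage sketch for NeighborsFinder: a DataFrame indexed by id with a
# "pos" column of (x, y) tuples, probed with a minimal object exposing .id/.pos.
import pandas as pd
from collections import namedtuple

data = pd.DataFrame({"pos": [(0, 0), (1, 0), (5, 5)]}, index=["a", "b", "c"])
finder = NeighborsFinder(data)

Obj = namedtuple("Obj", ["id", "pos"])
probe = Obj(id="x", pos=(0.1, 0.0))                  # not itself in the dataset
print(finder.find_in_range(probe, search_range=2))   # -> ['a', 'b']
print(finder.find_closest(probe))                    # nearest entry: 'a'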
def warp_keypoints(self, keypoints, grid_unnormalized):
    # scipy.spatial.kdtree is a private module; import KDTree from the package.
    from scipy.spatial import KDTree
    warp_grid = grid_unnormalized.reshape(-1, 2)
    regular_grid = self.grid_pixels_unnormalized.reshape(-1, 2)
    kd = KDTree(warp_grid)
    # For each keypoint, find the nearest warped grid node and map it back to
    # the corresponding node of the regular grid.
    dists, idxs = kd.query(keypoints)
    new_keypoints = regular_grid[idxs]
    return new_keypoints
def compute_minimax_distance(path1, path2):
    overall_distance = 0.
    for path, other in permutations([path1, path2]):
        tree = KDTree(other)
        for q1 in path:
            #closest_distance = min(get_distance(q1, q2) for q2 in other)
            closest_distance, closest_index = tree.query(q1, k=1, eps=0.)
            overall_distance = max(overall_distance, closest_distance)
    return overall_distance
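# Quick check of compute_minimax_distance on two parallel 2-D paths (KDTree and
# itertools.permutations are assumed imported at module level, as elsewhere in
# this file); the value is symmetric because both orderings are scanned.
path_a = [(0.0, 0.0), (1.0, 0.0), (2.0, 0.0)]
path_b = [(0.0, 1.0), (1.0, 1.0), (2.0, 1.0)]
print(compute_minimax_distance(path_a, path_b))  # -> 1.0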
def mutual_knn(points, n=10, distance=radial_kernel()):
    knn = {}
    kt = KDTree(points)
    for i, point in enumerate(points):
        # cannot use euclidean distance directly
        for neighbour in kt.query(point, n + 1)[1]:
            if i != neighbour:
                knn.setdefault(i, []).append((distance(point, points[neighbour]), neighbour))
    return knn
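# mutual_knn's default distance is radial_kernel(), evaluated when the def runs,
# so radial_kernel must already exist at that point; a plain Euclidean stand-in
# is enough to exercise the function.
import numpy as np
def radial_kernel():                      # assumed helper, minimal stand-in
    return lambda a, b: np.linalg.norm(np.asarray(a) - np.asarray(b))

pts = np.random.rand(20, 2)
graph = mutual_knn(pts, n=3)
print(len(graph[0]))                      # 3 neighbours recorded for point 0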
class scan(cluster):
    def __init__(self, filepath):
        start = time.time()
        self.file = File(filepath, mode="r")
        self.scale = self.file.header.scale[0]
        self.offset = self.file.header.offset[0]
        self.tree = KDTree(
            np.vstack([self.file.x, self.file.y, self.file.z]).transpose())
        self.time = self.file.header.get_date()
        end = time.time() - start
        print("Time Elapsed: {}".format(end))

    def nearNeighbor(self, point, k=1):
        return self.tree.query(point, k=k)

    def radialcluster(self, point, radius):
        # Snap to the nearest stored point, then gather everything within radius of it.
        neighbor = self.tree.data[self.tree.query(point, k=1)[1]]
        points = self.tree.data[self.tree.query_ball_point(neighbor, radius)]
        print("{} Points \n".format(points.shape[0]))
        return np.array(points)
def constructKnnGraph(points, numNeighbor):
    def euclidean_kernel(a, b):
        d = np.linalg.norm(a - b)
        return d
    knn = {}
    kt = KDTree(points)
    for i, point in enumerate(points):
        # k + 1 because the query returns the point itself as its own neighbour
        for neighbour in kt.query(point, numNeighbor + 1)[1]:
            if i != neighbour:
                knn.setdefault(i, []).append((euclidean_kernel(point, points[neighbour]), neighbour))
    return knn
def knnClassify(trData, trLabels, testData, nClasses):
    # run the knn classifier using the raw features
    (nDocs, _) = testData.shape
    knnLabels = numpy.zeros((nDocs, nClasses))
    leafsize = 5
    kdtree = KDTree(trData.tolist(), leafsize)
    (_, idxs) = kdtree.query(testData.tolist(), 3)
    for d in range(nDocs):
        # Majority vote among the 3 nearest training points.
        testPointIdxs = idxs[d]
        votes = [trLabels[l] for l in testPointIdxs]
        classVotes = [0] * nClasses
        for v in votes:
            classVotes[v] += 1
        knnLabels[d, classVotes.index(max(classVotes))] = 1
    return (knnLabels, kdtree)
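# Toy run of knnClassify: two well-separated 2-D classes, one-hot votes back.
import numpy
rng = numpy.random.RandomState(0)
trData = numpy.vstack([rng.randn(10, 2), rng.randn(10, 2) + 4.0])
trLabels = [0] * 10 + [1] * 10
testData = rng.randn(5, 2) + 4.0
knnLabels, tree = knnClassify(trData, trLabels, testData, nClasses=2)
print(knnLabels)  # each row carries a single 1 in the majority-vote column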
def test_evol_for_point(): lon = -100 lat = 60 path = "/home/huziy/skynet1_rech3/cordex/for_Samira/b1/tmp_era40_b1.nc" ds = Dataset(path) data = ds.variables["tmp"][:] years = ds.variables["year"][:] coord_file = "/skynet1_rech3/huziy/cordex/CORDEX_DIAG/NorthAmerica_0.44deg_CanESM_B1" coord_file = os.path.join(coord_file, "pmNorthAmerica_0.44deg_CanHisto_B1_195801_moyenne") #coord_file = os.path.join(data_folder, "pmNorthAmerica_0.44deg_ERA40-Int2_195801_moyenne") b, lons2d, lats2d = draw_regions.get_basemap_and_coords(file_path=coord_file) sel_lons = [lon] sel_lats = [lat] xo,yo,zo = lat_lon.lon_lat_to_cartesian(sel_lons, sel_lats) xi, yi, zi = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten()) ktree = KDTree(list(zip(xi,yi,zi))) dists, indexes = ktree.query(list(zip(xo,yo,zo))) print(len(indexes)) print(indexes) idx = indexes[0] import matplotlib.pyplot as plt plt.figure() data_to_show = [] for i, y in enumerate(years): data_to_show.append(data[i,:,:].flatten()[idx]) plt.plot(years, data_to_show, "-s", lw = 3) plt.grid() plt.show() pass
def get_edges(df1, df2):
    # scipy.spatial.kdtree is a private module; import from the package instead.
    from scipy.spatial import KDTree

    get_label = lambda x: tuple(int(y) for y in x[[2, 3]])

    # DataFrame.as_matrix() was removed in pandas 1.0; .values is equivalent here.
    x1 = df1[['i', 'j', 'frame', 'label']].values
    x2 = df2[['i', 'j', 'frame', 'label']].values

    kdt = KDTree(df1[['i', 'j']])
    points = df2[['i', 'j']]

    result = kdt.query(points, 3)
    edges = []
    for i2, (ds, ns) in enumerate(zip(*result)):
        end_node = get_label(x2[i2])
        for d, i1 in zip(ds, ns):
            start_node = get_label(x1[i1])
            w = d
            edges.append((start_node, end_node, w))

    return edges
def _calculatePointResiduals(self, curve, tube_radius = None): if tube_radius is None: X = self._X else: within_tube_indices = self.calculateCoverageIndices(curve, tube_radius) X = self._X.take(list(within_tube_indices), axis = 0) if self._maxSegmentLength is None: self._maxSegmentLength = self._calculateMaxSegmentLength(curve) lpc_points = curve['save_xd'] num_lpc_points = len(lpc_points) tree_lpc_points = KDTree(lpc_points) residuals = empty(len(X)) residuals_lamb = empty(len(X)) path_length = curve['lamb'] for j, p in enumerate(X): closest_lpc_point = tree_lpc_points.query(p) candidate_radius = sqrt(closest_lpc_point[0]**2 + 0.25*self._maxSegmentLength**2) candidate_segment_ends = tree_lpc_points.query_ball_point(p, candidate_radius) candidate_segment_ends.sort() current_min_segment_dist = (closest_lpc_point[0],0) current_closest_index = closest_lpc_point[1] last_index = None for i, index in enumerate(candidate_segment_ends): if index!=0 and last_index != index - 1: prv_segment_dist = self._distancePointToLineSegment(lpc_points[index-1], lpc_points[index], p) if prv_segment_dist[0] < current_min_segment_dist[0]: current_min_segment_dist = prv_segment_dist current_closest_index = index - 1 if index != num_lpc_points - 1: prv_segment_dist = self._distancePointToLineSegment(lpc_points[index], lpc_points[index+1], p) if prv_segment_dist[0] < current_min_segment_dist[0]: current_min_segment_dist = prv_segment_dist current_closest_index = index last_index = index residuals[j] = current_min_segment_dist[0] residuals_lamb[j] = path_length[current_closest_index] + current_min_segment_dist[1] lamb_order = argsort(residuals_lamb) return (residuals_lamb[lamb_order], residuals[lamb_order])
def get_data_interpolated_to_points(self, dest_lons=None, dest_lats=None,
                                    source_lons=None, source_lats=None, data=None):
    """
    Designed to interpolate all data to the AMNO domain
    """
    # `None not in [...]` is ambiguous for numpy arrays; test each explicitly.
    if source_lons is not None and source_lats is not None:
        lons1d = source_lons.flatten()
        lats1d = source_lats.flatten()
        points = lat_lon.lon_lat_to_cartesian_normalized(lons1d, lats1d)
        points = np.array(points).transpose()
        point_tree = KDTree(points)
    else:
        point_tree = self.kd_tree

    assert source_lons.shape == source_lats.shape == data.shape

    [xi, yi, zi] = lat_lon.lon_lat_to_cartesian_normalized(dest_lons.flatten(), dest_lats.flatten())
    pointsi = np.array([xi, yi, zi]).transpose()
    data1d = data.flatten()

    # distances dimensions = (n_points, n_neighbours)
    [distances, indices] = point_tree.query(pointsi, k=4)

    # Inverse-distance-squared weights, normalised per destination point.
    weights = 1.0 / distances ** 2
    norm = [np.sum(weights, axis=1)] * weights.shape[1]
    norm = np.array(norm).transpose()
    weights /= norm

    result = []
    for i in range(pointsi.shape[0]):  # xrange is Python 2 only
        w = weights[i, :]
        d = data1d[indices[i, :]]
        result.append(np.sum(w * d))
    return np.array(result).reshape(dest_lons.shape)
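# The inverse-distance-squared weighting above, in isolation: distances
# [1, 2, 4] give raw weights [1, 0.25, 0.0625], which normalise to roughly
# [0.762, 0.190, 0.048].
import numpy as np
distances = np.array([[1.0, 2.0, 4.0]])
weights = 1.0 / distances ** 2
weights /= weights.sum(axis=1, keepdims=True)
print(weights)  # [[0.7619... 0.1905... 0.0476...]]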
def join_by_cell_location(df_cells, df_ph, max_distance=4):
    # scipy.spatial.kdtree is a private module; import from the package instead.
    from scipy.spatial import KDTree
    # df_cells = df_cells.sort_values(['well', 'tile', 'cell'])
    # df_ph = df_ph.sort_values(['well', 'tile', 'cell'])
    i_tree = df_ph['global_y']
    j_tree = df_ph['global_x']
    i_query = df_cells['global_y']
    j_query = df_cells['global_x']

    kdt = KDTree(list(zip(i_tree, j_tree)))
    distance, index = kdt.query(list(zip(i_query, j_query)))
    cell_ph = df_ph.iloc[index]['cell'].pipe(list)
    cols_left = ['well', 'tile', 'cell_ph']
    cols_right = ['well', 'tile', 'cell']
    cols_ph = [c for c in df_ph.columns if c not in df_cells.columns]
    return (df_cells.assign(cell_ph=cell_ph, distance=distance)
            .query('distance < @max_distance')
            .join(df_ph.set_index(cols_right)[cols_ph], on=cols_left)
            # .drop(['cell_ph'], axis=1)
            )
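# The KDTree join above in miniature: each cell centroid is matched to the
# nearest phenotype (y, x) location, and the distance feeds the max_distance filter.
import numpy as np
from scipy.spatial import KDTree

ph_yx = [(0.0, 0.0), (10.0, 10.0)]     # stand-in for df_ph global_y/global_x
cell_yx = [(0.5, 0.2), (9.0, 9.5)]     # stand-in for df_cells global_y/global_x
kdt = KDTree(ph_yx)
distance, index = kdt.query(cell_yx)
print(distance, index)                 # -> [0.538... 1.118...] [0 1]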
def add_neighbors(df_info, num_neighbors=9, radius_leniency=10):
    # xy = ['x_um', 'y_um']  # superseded by the global coordinate columns below
    xy = [GLOBAL_X, GLOBAL_Y]
    site = SITE
    df = df_info.drop_duplicates(xy)
    kdt = KDTree(df[xy].values)

    distance, index = kdt.query(df[xy].values, k=num_neighbors)

    # convert tree indices to site labels
    index = np.array(df[site])[index]

    m = mode(distance.max(axis=1).astype(int)).mode[0]
    filt = (distance < (m + radius_leniency)).all(axis=1)

    it = zip(df.loc[filt, site], index[filt])
    arr = []
    for m, ix in it:
        arr += [{site: m, 'ix': sorted(ix)}]

    return df_info.merge(pd.DataFrame(arr), how='left')
def upscale(manager_in, manager_out, swe_in, nneighbours=25):
    assert isinstance(manager_in, Crcm5ModelDataManager)
    assert isinstance(manager_out, Crcm5ModelDataManager)

    lons_in_1d = manager_in.lons2D.flatten()
    lats_in_1d = manager_in.lats2D.flatten()

    x0, y0, z0 = lat_lon.lon_lat_to_cartesian(lons_in_1d, lats_in_1d)
    kdtree = KDTree(list(zip(x0, y0, z0)))

    lons_out_1d = manager_out.lons2D.flatten()
    lats_out_1d = manager_out.lats2D.flatten()
    x1, y1, z1 = lat_lon.lon_lat_to_cartesian(lons_out_1d, lats_out_1d)

    dd, ii = kdtree.query(list(zip(x1, y1, z1)), k=nneighbours)
    print(ii.shape)

    # Average the nneighbours nearest input cells for every output cell.
    swe_in_1d = swe_in.flatten()
    return np.mean(swe_in_1d[ii], axis=1).reshape(manager_out.lons2D.shape)
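# The core of upscale in isolation: querying with k neighbours returns an index
# array of shape (n_out, k), so fancy indexing plus a mean along axis=1 yields
# the k-neighbour average for every output point at once.
import numpy as np
from scipy.spatial import KDTree

src = np.random.rand(100, 3)
vals = np.random.rand(100)
tree = KDTree(src)
_, ii = tree.query(np.random.rand(10, 3), k=4)  # ii.shape == (10, 4)
print(np.mean(vals[ii], axis=1))                # 10 upscaled values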
def make_edge_graph(data, k, ball=True):
    # k is a radius when ball=True, otherwise a neighbour count.
    kt = KDTree(data)
    out = [set() for i in data]
    k_max = 0
    k_min = np.inf  # np.infty is a deprecated alias of np.inf
    for i, points in enumerate(data):
        if ball:
            neighbors = kt.query_ball_point(points, k)
        else:
            distance, neighbors = kt.query(points, int(k + 1))
            neighbors = neighbors[1:]  # drop the point itself
        for j in neighbors:
            if j != i:
                out[i].add(j)
                if len(out[i]) > k_max:
                    k_max = len(out[i])
                out[j].add(i)
                if len(out[j]) > k_max:
                    k_max = len(out[j])
        if len(out[i]) < k_min:
            k_min = len(out[i])
    return (k_min, k_max, out)
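# make_edge_graph in both modes on the same point set: ball=True reads k as a
# radius, ball=False as a neighbour count (k nearest, the point itself dropped).
import numpy as np
pts = np.random.rand(30, 2)
k_min, k_max, adjacency = make_edge_graph(pts, 0.3, ball=True)
print(k_min, k_max)
k_min, k_max, adjacency = make_edge_graph(pts, 3, ball=False)
print(k_min, k_max)   # k_max can exceed 3: edges are symmetrised into both sets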
def plot_current_alts_nyear_rule(nyear=2): start_year = 1981 end_year = 2008 sim_data_folder = "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/era40_driven_b1" sim_names = ["ERA40", "MPI", "CanESM"] all_data_f = "/home/huziy/skynet1_rech3/cordex/for_Samira" simname_to_path = { "ERA40": os.path.join(all_data_f, "alt_era_b1_yearly.nc"), "MPI": os.path.join(all_data_f, "alt_mpi_b1_yearly.nc"), "CanESM": os.path.join(all_data_f, "alt_canesm_b1_yearly.nc") } coord_file = os.path.join( sim_data_folder, "pmNorthAmerica_0.44deg_ERA40-Int_B1_200812_moyenne") basemap, lons2d, lats2d = draw_regions.get_basemap_and_coords( resolution="c", file_path=coord_file, llcrnrlat=40.0, llcrnrlon=-145, urcrnrlon=-20, urcrnrlat=74) assert isinstance(basemap, Basemap) #basemap.transform_scalar() #basemap = Basemap() lons2d[lons2d > 180] -= 360 x, y = basemap(lons2d, lats2d) #x = (x[1:,1:] + x[:-1, :-1]) /2.0 permafrost_mask = draw_regions.get_permafrost_mask(lons2d, lats2d) mask_cond = (permafrost_mask <= 0) | (permafrost_mask >= 3) # plot_utils.apply_plot_params(width_pt=None, width_cm=20, height_cm=40, font_size=25) fig = plt.figure() assert isinstance(fig, Figure) h_max = 10 cmap = my_colormaps.get_lighter_jet_cmap( ncolors=10) #cm.get_cmap("jet",10) bounds = [0, 0.1, 0.5, 1, 2, 3, 5, 8, 9, 10, 11] norm = BoundaryNorm(boundaries=bounds, ncolors=len(bounds), clip=True) cmap.set_over(cmap(1.0)) clevels = np.arange(0, h_max + 1, 1) gs = gridspec.GridSpec(3, 1) all_axes = [] all_img = [] i = 0 hc_list = [] hct_list = [] for name in sim_names: path = simname_to_path[name] #select data and needed alt ds = Dataset(path) years = ds.variables["year"][:] hct = ds.variables["alt"][(years >= start_year) & (years <= end_year), :, :] hct_list.append(hct) print("hct.shape = ", hct.shape) #hc = get_alt_using_nyear_rule(hct, nyears = nyear) hc = np.mean(hct, axis=0) hc_list.append(hc) ax = fig.add_subplot(gs[i, 0]) assert isinstance(ax, Axes) hc = np.ma.masked_where(mask_cond | (np.min(hct, axis=0) < 0), hc) #hc = np.ma.masked_where( (hc < 0), hc) img = basemap.pcolormesh(x, y, hc, cmap=cmap, vmax=h_max, norm=norm) if not i: ax.set_title("ALT, mean ({0} - {1}) \n".format( start_year, end_year)) i += 1 ax.set_ylabel(name) all_axes.append(ax) all_img.append(img) i = 0 axs_to_hide = [] #zones and coastlines for the_ax, the_img in zip(all_axes, all_img): assert isinstance(the_ax, Axes) basemap.drawcoastlines(ax=the_ax, linewidth=0.5) basemap.readshapefile("data/pf_4/permafrost8_wgs84/permaice", name="zone", ax=the_ax, linewidth=1.5) divider = make_axes_locatable(the_ax) cax = divider.append_axes("right", "5%", pad="3%") cb = fig.colorbar(the_img, cax=cax, extend="max", ticks=bounds) cax.set_title("m \n") if i != 2: axs_to_hide.append(cax) i += 1 fig.tight_layout(w_pad=0.0) for the_ax in axs_to_hide: the_ax.set_visible(False) fig.savefig("alt_mean_current.png") #print ALT for selected points site_names = ["S", "K", "T"] sel_lons = [-75.646, -65.92, -69.95] sel_lats = [62.197, 58.709, 58.67] xo, yo, zo = lat_lon.lon_lat_to_cartesian(sel_lons, sel_lats) xi, yi, zi = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten()) ktree = KDTree(list(zip(xi, yi, zi))) dists, indexes = ktree.query(list(zip(xo, yo, zo))) for name, data, the_hct in zip(sim_names, hc_list, hct_list): print(name) flat_data = data.flatten() for p_name, ind in zip(site_names, indexes): in_data = [] for t in range(the_hct.shape[0]): in_data.append(the_hct[t, :, :].flatten()[ind]) print(",".join(["{0:.1f}".format(float(x)) for x in in_data])) print(p_name, 
"{0:.1f} m".format(float(flat_data[ind]))) print("--" * 10)
    vectors.append(vector)

# perform a few similarity queries; the second assignment supersedes the first,
# so the token-based list is kept only for reference
queries = ["i", "name", "jQuery", "counter", "element", "true", "msg", "length"]  # for token-based
queries = ["ID:i", "ID:name", "ID:jQuery", "ID:counter", "ID:element", "LIT:true", "ID:msg", "ID:length"]  # for AST-based
kd_tree = KDTree(np.array(vectors))
for query in queries:
    if query in name_to_vector:
        print(query + " has similar names:")
        query_vector = name_to_vector[query]
        _, neighbor_idxs = kd_tree.query(query_vector, k=6)
        for idx in neighbor_idxs:
            close_name = names[idx]
            if close_name != query:
                print("  " + close_name)

# show PCA
pca_vectors = []
pca_labels = []
for idx, name in enumerate(names):
    if random.random() < sampling_rate_for_PCA:
        pca_labels.append(name)
        pca_vectors.append(vectors[idx])
ipca = IncrementalPCA(n_components=2)
def convert(inPath, lonlats):
    ds = gdal.Open(inPath, gdal.GA_ReadOnly)
    assert isinstance(ds, Dataset)

    (Xul, deltaX, rotation, Yul, rotation, deltaY) = ds.GetGeoTransform()
    print(dir(ds))
    print(ds.GetMetadata_Dict())
    print(ds.GetDescription())

    srs_wkt = ds.GetProjection()
    Nx = ds.RasterXSize
    Ny = ds.RasterYSize
    print(ds.RasterCount)

    # ReadAsArray needs integer sizes; "/" would give a float in Python 3.
    nxToRead = Nx // 2
    nyToRead = int(Ny / 1.5)
    data = ds.GetRasterBand(1).ReadAsArray(0, 0, nxToRead, nyToRead).transpose()
    print(srs_wkt)
    print(data.shape)
    #plt.imshow(data)
    #plt.show()
    ds = None

    print(Xul, Yul, deltaX, deltaY, rotation)
    x1d = np.arange(Xul, Xul + deltaX * nxToRead, deltaX)
    y1d = np.arange(Yul, Yul + deltaY * nyToRead, deltaY)

    assert len(x1d) == nxToRead
    assert len(y1d) == nyToRead

    y, x = np.meshgrid(y1d, x1d)

    fieldName = os.path.basename(inPath).split("_")[0].lower()
    coef = name_to_mult[fieldName]
    no_data = name_to_nodata_value[fieldName]

    usable = (data != no_data)
    print(x.shape, usable.shape)
    x0 = x[usable]
    y0 = y[usable]
    cartx, carty, cartz = lat_lon.lon_lat_to_cartesian(x0, y0)
    data_1d = data[usable]

    print("useful data points : {0}".format(len(x0)))
    tree = KDTree(list(zip(cartx, carty, cartz)))
    print("constructed the kdtree")

    xi, yi, zi = lat_lon.lon_lat_to_cartesian(lonlats[:, 0], lonlats[:, 1])
    dists, inds = tree.query(list(zip(xi, yi, zi)), k=AGGR_SIZE)

    npoints = dists.shape[0]
    interp_data = np.zeros((npoints,))
    for i in range(npoints):
        the_dists = dists[i, :]
        the_inds = inds[i, :]

        good_pts = (the_dists < LIMIT_DIST)
        if len(the_dists[good_pts]) < 0.25 * AGGR_SIZE:
            # if there are not enough usable points in the vicinity, set the value to no_data
            interp_data[i] = -1
            continue

        the_dists = the_dists[good_pts]
        the_inds = the_inds[good_pts]

        # inverse-distance-squared weighting over the retained neighbours
        interp_coefs = 1.0 / the_dists ** 2
        interp_data[i] = np.sum(interp_coefs * data_1d[the_inds]) / np.sum(interp_coefs)

    interp_data[interp_data >= 0] *= coef
    print("completed interpolation")
    return interp_data
class GrdcDataManager: def __init__(self, path_tofolder="/home/huziy/skynet3_exec1/grdc_global"): self.path_to_annual_rof = os.path.join(path_tofolder, "obs_ro.grd") self.lons2d = None self.lats2d = None self.ncols = None self.nrows = None self.xll = None self.yll = None self.cellsize = None self.nodata_value = None self.ktree = None pass def _get_lon_lats(self): if self.lons2d is None: lons = [self.xll + i * self.cellsize for i in range(self.ncols)] lats = [self.yll + i * self.cellsize for i in range(self.nrows)] self.lats2d, self.lons2d = np.meshgrid(lats, lons) return self.lons2d, self.lats2d def interpolate_data_to_model_grid(self, model_lons_2d, model_lats_2d, data_obs): x0, y0, z0 = lat_lon.lon_lat_to_cartesian(model_lons_2d.flatten(), model_lats_2d.flatten()) x, y, z = lat_lon.lon_lat_to_cartesian(self.lons2d.flatten(), self.lats2d.flatten()) if self.ktree is None: self.ktree = KDTree(list(zip(x, y, z))) d, i = self.ktree.query(list(zip(x0, y0, z0))) return data_obs.flatten()[i].reshape(model_lons_2d.shape) def _read_data_from_file(self, path): f = open(path) vals = [] for line in f: line = line.strip() if line == "": continue if line.startswith("ncols"): self.ncols = int(line.split()[1].strip()) elif line.startswith("nrows"): self.nrows = int(line.split()[1].strip()) elif line.startswith("xllcorner"): self.xll = float(line.split()[1].strip()) elif line.startswith("yllcorner"): self.yll = float(line.split()[1].strip()) elif line.startswith("cellsize"): self.cellsize = float(line.split()[1].strip()) elif line.startswith("NODATA"): self.nodata_value = int(line.split()[1].strip()) else: vals.append(list(map(float, [s.strip() for s in line.split()]))) #print len(vals), self.ncols * self.nrows vals = np.array(vals[::-1]) #reverse row order vals = vals.transpose() return vals def get_mean_annual_runoff_in_mm_per_s(self): vals = self._read_data_from_file(self.path_to_annual_rof) #vals = np.ma.masked_where(vals.astype(int) == self.nodata_value, vals) vals = np.ma.masked_where(vals < 0, vals) print(self.nodata_value, np.min(vals), self.nodata_value == np.min(vals)) vals /= 365 * 24 * 60 * 60 #convert to mm/s self._get_lon_lats() return self.lons2d, self.lats2d, vals
class TimeSeriesPlotter:
    def __init__(self, ax, basemap, lons2d, lats2d, ncVarDict, times, start_date, end_date):
        """
        Plots a time series at the point nearest to the clicked one
        :type ax: Axes
        """
        assert isinstance(ax, Axes)
        self.basemap = basemap
        assert isinstance(self.basemap, Basemap)

        self.lons_flat = lons2d.flatten()
        self.lats_flat = lats2d.flatten()
        self.ncVarDict = ncVarDict
        self.lons2d = lons2d
        self.lats2d = lats2d
        self.counter = 0
        self.ax = ax

        x, y, z = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten())
        self.kdtree = KDTree(list(zip(x, y, z)))

        self.sel_time_indices = np.where([start_date <= t <= end_date for t in times])[0]
        self.times = times[self.sel_time_indices]

        ax.figure.canvas.mpl_connect("button_press_event", self)

    def _get_closest_ij(self, event):
        lon, lat = self.basemap(event.xdata, event.ydata, inverse=True)
        x0, y0, z0 = lat_lon.lon_lat_to_cartesian(lon, lat)
        dist, i = self.kdtree.query((x0, y0, z0))
        lon0, lat0 = self.lons_flat[i], self.lats_flat[i]
        ind = np.where((self.lons2d == lon0) & (self.lats2d == lat0))
        ix = ind[0][0]
        jy = ind[1][0]
        return ix, jy

    def _plot_timeseries(self, ax, ix, jy):
        fig_daily = plt.figure()
        ax_daily = plt.gca()

        fig_monthly = plt.figure()
        ax_monthly = plt.gca()

        for varName, ncVar in self.ncVarDict.items():
            sel_values = ncVar[self.sel_time_indices, 0, ix, jy]
            ax.plot(self.times, sel_values, label=varName)

            # calculate and plot daily means; pd.TimeSeries and
            # resample(..., how="mean") were removed from pandas long ago
            ts = pd.Series(index=self.times, data=sel_values)
            ts = ts.resample("D").mean()
            ax_daily.plot(ts.index, ts.values, label=varName)

            # calculate and plot monthly means
            ts = ts.resample("M").mean()
            ax_monthly.plot(ts.index, ts.values, label=varName)

        ax.legend()
        ax.set_title(str(self.counter))
        ax_daily.legend()
        ax_daily.set_title(str(self.counter) + " - daily")
        ax_monthly.legend()
        ax_monthly.set_title(str(self.counter) + " - monthly")
        assert isinstance(ax, Axes)

    def __call__(self, event):
        print(event.xdata, event.ydata)
        print(event.button)
        if event.button != 3:
            return
        ix, jy = self._get_closest_ij(event)
        fig = plt.figure()
        sounding_ax = fig.add_subplot(1, 1, 1)
        self._plot_timeseries(sounding_ax, ix, jy)
        self.ax.annotate(str(self.counter), (event.xdata, event.ydata),
                         fontproperties=FontProperties(size=10))
        self.ax.redraw_in_frame()
        self.counter += 1
        assert isinstance(fig, Figure)
        plt.show()
def plot_current_alts(): from . import plot_dpth_to_bdrck bdrck_field = plot_dpth_to_bdrck.get_depth_to_bedrock() start_year = 1980 end_year = 1996 sim_data_folder = "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/era40_driven_b1" coord_file = os.path.join( sim_data_folder, "pmNorthAmerica_0.44deg_ERA40-Int_B1_200812_moyenne") #coord_file = "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/NA_1.0deg_soil_spinup2/pmNA_1.0deg_soil_spinup2_228006_moyenne" sim_names = ["ERA40", "MPI", "CanESM"] simname_to_path = { #"ERA40": "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/era40_driven_b1", "ERA40": "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/NorthAmerica_0.44deg_ERA40-Int_old_snow_cond", #"ERA40" : "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/NA_1.0deg_soil_spinup2", "MPI": "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/NorthAmerica_0.44deg_MPI_B1", "CanESM": "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/NorthAmerica_0.44deg_CanESM_B1" } basemap, lons2d, lats2d = draw_regions.get_basemap_and_coords( resolution="c", file_path=coord_file, llcrnrlat=45.0, llcrnrlon=-145, urcrnrlon=-20, urcrnrlat=74, anchor="W") assert isinstance(basemap, Basemap) #basemap.transform_scalar() #basemap = Basemap() lons2d[lons2d > 180] -= 360 x, y = basemap(lons2d, lats2d) #x = (x[1:,1:] + x[:-1, :-1]) /2.0 permafrost_mask = draw_regions.get_permafrost_mask(lons2d, lats2d) mask_cond = (permafrost_mask <= 0) | (permafrost_mask >= 2) # plot_utils.apply_plot_params(width_pt=None, width_cm=20, height_cm=40, font_size=16) fig = plt.figure() assert isinstance(fig, Figure) h_max = 10 bounds = [0, 0.1, 0.5, 1, 2, 3, 4, 5] cmap = my_colormaps.get_lighter_jet_cmap(ncolors=len(bounds) - 1) #cm.get_cmap("jet",10) #cmap = my_colormaps.get_cmap_wo_red(ncolors=len(bounds) - 1) norm = BoundaryNorm(boundaries=bounds, ncolors=len(bounds), clip=True) cmap.set_over(cmap(1.0)) clevels = np.arange(0, h_max + 1, 1) gs = gridspec.GridSpec(3, 2, width_ratios=[1, 0.06], hspace=0, wspace=0.0, left=0.05, bottom=0.02, top=0.95) all_axes = [] all_img = [] i = 0 hc_list = [] for name in sim_names: path = simname_to_path[name] dm = CRCMDataManager(data_folder=path) hc0, t3d_min, t3d_max = dm.get_alt_using_monthly_mean_climatology( list(range(start_year, end_year + 1))) hc_list.append(hc0) ax = fig.add_subplot(gs[i, 0]) #cp = SoundingPlotter(ax, basemap, t3d_min, t3d_max, lons2d, lats2d, levelheights=dm.level_heights) assert isinstance(ax, Axes) hc = np.ma.masked_where(mask_cond | (hc0 < 0), hc0) #hc = np.ma.masked_where( (hc0 < 0), hc0) hc5 = np.ma.masked_where((hc0 <= 15) | hc.mask, hc) img = basemap.pcolormesh(x, y, hc, cmap=cmap, vmax=h_max, norm=norm) if not i: ax.set_title("ALT ({0} - {1}) \n".format(start_year, end_year)) i += 1 ax.set_ylabel("CRCM ({0})".format(name)) all_axes.append(ax) all_img.append(img) print(np.ma.min(hc), np.ma.max(hc)) #hc5 = np.ma.masked_where((hc0 <= 6) | hc.mask, hc) #print "Number of cells with alt > 5 is {0}, and the range is {1} ... {2}".format(hc5.count(), hc5.min(), hc5.max()) #bdrck_field5 = np.ma.masked_where(hc5.mask, bdrck_field) #print "Bedrock ranges for those points: {0} ... 
{1}".format(bdrck_field5.min(), bdrck_field5.max()) ind = np.where(~hc5.mask) xs = ind[0] ys = ind[1] all_months, all_temps = dm.get_monthly_mean_soiltemps( year_range=range(start_year, end_year + 1)) all_months_ord = date2num(all_months) # mpl.rcParams['contour.negative_linestyle'] = 'solid' # # for the_i, the_j in zip(xs,ys): # #plot profile # plt.figure() # plt.plot(t3d_max[the_i, the_j, :] - dm.T0, dm.level_heights, color = "r") # plt.plot(t3d_min[the_i, the_j, :] - dm.T0, dm.level_heights, color = "b") # plt.plot([0 , 0], [dm.level_heights[0], dm.level_heights[-1]], color = "k") # # x1, x2 = plt.xlim() # #plt.plot( [x1, x2], [bdrck_field[the_i, the_j], bdrck_field[the_i, the_j]], color = "k", lw = 3 ) # #plt.title(str(i) + ", dpth_to_bedrock = {0} m".format(bdrck_field[the_i, the_j])) # plt.title(str(i)) # plt.gca().invert_yaxis() # plt.savefig("prof{0}.png".format(i)) # ax.annotate(str(i), (x[the_i, the_j], y[the_i, the_j]), font_properties = # FontProperties(size=10)) # # # #plot vertical temp cross-section # plt.figure() # plt.title(str(i) + ", ({0} - {1})".format(start_year, end_year)) # # levs2d, times2d = np.meshgrid(dm.level_heights, all_months_ord) # clevs = [-25,-20,-10,-5,-1,0,1,5,10,20,25] # norm = BoundaryNorm(boundaries=clevs, ncolors=len(clevs) - 1) # cmap = cm.get_cmap("jet", len(clevs) - 1) # # img = plt.contourf(times2d, levs2d, all_temps[:,the_i, the_j, :] - dm.T0, levels = clevs, cmap = cmap, norm = norm) # #plt.contour(times2d, levs2d, all_temps[:,the_i, the_j, :] - dm.T0, levels = clevs, colors = "k", linewidth = 1) # the_ax = plt.gca() # assert isinstance(the_ax, Axes) # the_ax.invert_yaxis() # the_ax.xaxis.set_major_formatter(FuncFormatter( # lambda x, pos: num2date(float(x)).strftime("%Y") # )) # # print "i = {0}; lon, lat = {1}, {2}".format(i, lons2d[the_i, the_j], lats2d[the_i, the_j]) # # plt.colorbar(img, ticks = clevs) # # plt.savefig("temp_section_{0}.png".format(i)) # # i += 1 print("lons = [{0}]".format(",".join( [str(x) for x in lons2d[np.array(xs), np.array(ys)]]))) print("lats = [{0}]".format(",".join( [str(x) for x in lats2d[np.array(xs), np.array(ys)]]))) # draw barplot with numbers of alt in given ranges # plt.figure() # alt_ranges = xrange(0,18) # numbers = [] # for the_alt in alt_ranges: # hci = np.ma.masked_where( (hc0 < the_alt) | (hc0 > the_alt + 1) | hc.mask ,hc) # numbers.append(hci.count()) # plt.bar(alt_ranges, numbers, width=1) # plt.xlabel("ALT (m)") # plt.ylabel("Number of cells") # plt.savefig("numbers_in_range.png") i = 0 axs_to_hide = [] #zones and coastlines for the_ax, the_img in zip(all_axes, all_img): # divider = make_axes_locatable(the_ax) # cax = divider.append_axes("right", "5%", pad="3%") assert isinstance(the_ax, Axes) basemap.drawcoastlines(ax=the_ax, linewidth=0.5) basemap.readshapefile("data/pf_4/permafrost8_wgs84/permaice", name="zone", ax=the_ax, linewidth=1.5, drawbounds=False) for nshape, seg in enumerate(basemap.zone): if basemap.zone_info[nshape]["EXTENT"] not in ["C"]: continue poly = mpl.patches.Polygon(seg, edgecolor="k", facecolor="none", zorder=10, lw=1.5) the_ax.add_patch(poly) # if i != 1: # axs_to_hide.append(cax) i += 1 cax = fig.add_subplot(gs[:, 1]) cax.set_anchor("W") cax.set_aspect(35) formatter = FuncFormatter(lambda x, pos: "{0: <6}".format(x)) cb = fig.colorbar(all_img[0], ax=cax, cax=cax, extend="max", ticks=bounds, format=formatter) cax.set_title("m") #fig.tight_layout(h_pad=0) # for the_ax in axs_to_hide: # the_ax.set_visible(False) fig.savefig("alt_from_climatology_current.png") #print ALT for 
selected points site_names = ["S", "K", "T"] sel_lons = [-75.646, -65.92, -69.95] sel_lats = [62.197, 58.709, 58.67] xo, yo, zo = lat_lon.lon_lat_to_cartesian(sel_lons, sel_lats) xi, yi, zi = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten()) ktree = KDTree(list(zip(xi, yi, zi))) dists, indexes = ktree.query(list(zip(xo, yo, zo))) for name, data in zip(sim_names, hc_list): print(name) flat_data = data.flatten() for p_name, ind in zip(site_names, indexes): print(p_name, "{0} m".format(flat_data[ind])) print("--" * 10) pass
class RealWorldMap(object):
    def __init__(self, patches, projections):
        self.patches = patches
        self.projections = projections
        self.build()

    def build(self):
        logger.debug("Building maps for conversion between real and image space")
        self.imagetree = {}
        self.imagemap = {}
        self.realtree = KDTree([x.realcoords for x in self.patches])
        self.realmapping = {}
        for num, patch in enumerate(self.patches):
            if num % 1000 == 0:
                logger.debug("Built maps for {0} of {1} patches".format(num, len(self.patches)))
            resp = {}
            for _, projection in self.projections.items():
                inimage = patch.project(projection)
                if projection.id not in self.imagetree:
                    self.imagetree[projection.id] = []
                    self.imagemap[projection.id] = {}
                self.imagetree[projection.id].append(inimage)
                self.imagemap[projection.id][tuple(inimage)] = patch.realcoords
                resp[projection.id] = inimage
            self.realmapping[tuple(patch.realcoords)] = resp
        logger.debug("Done building maps for {0} patches".format(len(self.patches)))
        logger.debug("Building image KD tree")
        for key, imagecoords in self.imagetree.items():
            self.imagetree[key] = KDTree(imagecoords)

    def realtoimages(self, coords):
        _, nearestindex = self.realtree.query(coords)
        nearest = self.realtree.data[nearestindex]
        return self.realmapping[tuple(nearest)]

    def realregiontoimages(self, coords):
        _, nearestindices = self.realtree.query(coords)
        resp = {}
        for nearestindex in nearestindices:
            nearest = self.realtree.data[nearestindex]
            points = self.realmapping[tuple(nearest)]
            for k, v in points.items():  # iteritems() is Python 2 only
                if k not in resp:
                    resp[k] = []
                resp[k].append(v)
        return resp

    def imagetoreal(self, projection, coords):
        try:
            projection = projection.id
        except AttributeError:
            pass
        _, nearestindex = self.imagetree[projection].query(coords)
        nearest = self.imagetree[projection].data[nearestindex]
        return self.imagemap[projection][tuple(nearest)]
def compare_alt(): #obs stations = get_station_list() #model data alts_model = [ active_layer_thickness.get_alt_for_year(the_year) for the_year in range(1991, 2001) ] alt_mean = np.mean(alts_model, axis=0) b, lons2d, lats2d = draw_regions.get_basemap_and_coords() permafrost_kinds = draw_regions.get_permafrost_mask(lons2d, lats2d) permafrost_kinds_flat = permafrost_kinds.flatten() lons2d[lons2d > 180] -= 360 #find corresponding indices on the model grid x, y, z = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten()) kdtree = KDTree(list(zip(x, y, z))) alt_mean_flat = alt_mean.flatten() h_mod = [] h_obs = [] station_lons = [] station_lats = [] for the_station in stations: x0, y0, z0 = lat_lon.lon_lat_to_cartesian(the_station.lon, the_station.lat) d, i = kdtree.query([x0, y0, z0]) if permafrost_kinds_flat[i] not in (1,2): continue print(d, i) print(the_station.mean_alt_m, alt_mean_flat[i]) h_mod.append(alt_mean_flat[i]) h_obs.append(the_station.mean_alt_m) station_lons.append(the_station.lon) station_lats.append(the_station.lat) plot_utils.apply_plot_params(width_pt=None, height_cm=20, width_cm=16, font_size=12) fig = plt.figure() gs = gridspec.GridSpec(2,1) ax = fig.add_subplot(gs[0,0]) ax.plot(h_obs, h_mod, 'o') ax.set_xlabel("Obs.") ax.set_ylabel("Mod.") upper_lim = max(np.max(h_mod), np.max(h_obs)) ax.set_xlim(0, upper_lim + 0.1 * upper_lim) ax.set_ylim(0, upper_lim + 0.1 * upper_lim) ax = fig.add_subplot(gs[1,0]) min_lon, max_lon = min(station_lons), max(station_lons) min_lat, max_lat = min(station_lats), max(station_lats) dx = (max_lon - min_lon) * 0.1 dy = (max_lat - min_lat) * 0.6 min_lon -= dx max_lon += dx min_lat -= dy max_lat += dy lon1 = -97 lat1 = 47.50 lon2 = -7 lat2 = 0 b_zoom = Basemap(projection="omerc", resolution="l", llcrnrlon=min_lon, llcrnrlat=min_lat, urcrnrlon=max_lon, urcrnrlat=max_lat, lat_1=lat1, lon_1=lon1, lat_2=lat2, lon_2=lon2, no_rot=True ) s_x, s_y = b_zoom(station_lons, station_lats) b_zoom.scatter(s_x, s_y, c = "r", ax = ax, marker = "*", s = 30, linewidths = 0.1, zorder = 2) b_zoom.drawcoastlines(ax = ax, linewidth = 0.5) fig.savefig("pf_validate.png") pass
class TimeseriesPlotter: def __init__(self, name_to_date_to_field, basemap, lons2d, lats2d, ax = None, cell_area = None, cell_manager = None, data_manager = None): self.gwdi_mean_field = None self.traf_mean_field = None self.tdra_mean_field = None self.upin_mean_field = None self.basemap = basemap self.date_to_stfl_field = name_to_date_to_field["STFL"] self.date_to_traf_field = name_to_date_to_field["TRAF"] self.date_to_tdra_field = name_to_date_to_field["TDRA"] self.date_to_pr_field = name_to_date_to_field["PR"] self.date_to_swe_field = name_to_date_to_field["I5"] self.date_to_swst_field = name_to_date_to_field["SWST"] #self.date_to_imav_field = name_to_date_to_field["IMAV"] self.acc_area_km2 = name_to_date_to_field["FACC"] #:type : CellManager self.cell_manager = cell_manager assert isinstance(self.cell_manager, CellManager) self.cell_area = cell_area x, y, z = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten()) self.kdtree = KDTree(list(zip(x,y,z))) ax.figure.canvas.mpl_connect("button_press_event", self) self.ax = ax self.lons2d = lons2d self.lats2d = lats2d self.data_manager = data_manager assert isinstance(self.data_manager, Crcm5ModelDataManager) self.x_pr, self.y_pr = basemap(lons2d, lats2d) self.lons_flat = lons2d.flatten() self.lats_flat = lats2d.flatten() self.dates_sorted = list( sorted(list(name_to_date_to_field.items())[0][1].keys()) ) self.counter = 0 self.date_to_swsr_field = name_to_date_to_field["SWSR"] self.date_to_swsl_field = name_to_date_to_field["SWSL"] #self.date_to_gwdi_field = name_to_date_to_field["GWDI"] self.date_to_upin_field = name_to_date_to_field["UPIN"] #static fields self.slope = name_to_date_to_field["SLOP"] self.channel_length = name_to_date_to_field["LENG"] self.lake_outlet = name_to_date_to_field["LKOU"] self.coef_bf = -np.ones(self.slope.shape) good_points = self.slope >= 0 self.coef_bf[good_points] = (self.slope[good_points]) ** 0.5 / ((self.channel_length[good_points]) ** (4.0/3.0) * data_manager.manning_bf[good_points] ) def _get_closest_ij(self, event): lon, lat = self.basemap(event.xdata, event.ydata, inverse = True) x0, y0, z0 = lat_lon.lon_lat_to_cartesian(lon, lat) dist, i = self.kdtree.query((x0,y0,z0)) lon0, lat0 = self.lons_flat[i], self.lats_flat[i] ind = np.where((self.lons2d == lon0) & (self.lats2d == lat0)) ix = ind[0][0] jy = ind[1][0] return ix, jy def __call__(self,event): if event.button != 3: return i,j = self._get_closest_ij( event ) vals = [ self.date_to_stfl_field[d][i,j] for d in self.dates_sorted ] plt.figure() plt.plot(self.dates_sorted, vals, label = "STFL") mask = self.cell_manager.get_mask_of_cells_connected_with(self.cell_manager.cells[i][j]) print("sum(mask) = ", np.sum(mask)) vals1 = [ np.sum( self.date_to_traf_field[d][mask == 1] ) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals1, label = "TRAF") vals2 = [ np.sum( self.date_to_tdra_field[d][mask == 1] ) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals2, label = "TDRA") vals3 = [ np.sum( self.date_to_pr_field[d][mask == 1] ) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals3, label = "PR") #vals4 = [ # np.sum( self.date_to_gwdi_field[d][mask == 1] ) for d in self.dates_sorted #] #plt.plot(self.dates_sorted, vals4, label = "GWDI") vals5 = [ np.sum( self.date_to_upin_field[d][i,j] ) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals5, label = "UPIN") if self.upin_mean_field is None: self.upin_mean_field = np.mean(list(self.date_to_upin_field.values()), axis = 0) plt.legend() plt.title("{0}: acc={1} 
km**2".format(self.counter, self.acc_area_km2[i, j])) # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.upin_mean_field) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("min-max: {0};{1}".format(to_plot_2d.min(), to_plot_2d.max())) # # self.ax.annotate(str(self.counter), (event.xdata, event.ydata), font_properties = # FontProperties(size=10), bbox=dict(boxstyle="round", fc="w")) # self.ax.redraw_in_frame() # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.data_manager.cbf) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # # # plt.title("CBF, {0:g}: v= {1}, min={2}, max={3}".format(self.counter, to_plot_2d[i,j], to_plot_2d.min(), to_plot_2d.max())) # # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.data_manager.bankfull_storage_m3) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("STBM, {0}: v= {1}".format(self.counter, to_plot_2d[i,j])) # # # plt.figure() # ax1 = plt.gca() # mbf = self.data_manager.manning_bf # to_plot_2d = np.ma.masked_where(mask < 0.5, mbf) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("MABF, {0}: v= {1}, min={2}, max={3}".format(self.counter, to_plot_2d[i,j], to_plot_2d.min(), to_plot_2d.max())) # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.slope) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("SLOPe, {0}: v= {1}, min={2}, max={3}".format(self.counter, to_plot_2d[i,j], to_plot_2d.min(), to_plot_2d.max())) # # # # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.data_manager.lake_area) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("lake area, {0}: v= {1}".format(self.counter, to_plot_2d[i,j])) # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.coef_bf) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("coef_bf, {0}: v= {1:.1g}, min={2:.1g}, max={3:.1g}".format(self.counter, to_plot_2d[i,j], to_plot_2d.min(), to_plot_2d.max())) # plt.figure() #snow vals6 = [ np.sum( self.date_to_swe_field[d][mask == 1] ) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals6, label = "SWE") vals4 = [ np.sum( self.date_to_swe_field[d][i,j] ) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals4, label = "GWST") vals5 = [ np.sum( self.date_to_swsr_field[d][i,j] ) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals5, label = "SWSR") vals5 = [ np.sum( self.date_to_swsl_field[d][i,j] ) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals5, label = "SWSL") plt.legend() plt.title("{0}, lkfr = {1}".format(self.counter, self.data_manager.lake_fraction[i,j])) fName = "route_params_{0}_{1}.bin".format(i, j) info = {} #traf -> dict( date -> value in m**3/s ) traf_dict = dict(list(zip(self.dates_sorted, 
                [self.date_to_traf_field[d][i, j] for d in self.dates_sorted])))
        traf_dict = {"TRAF": traf_dict}
        info.update(traf_dict)

        upin_dict = dict(list(zip(self.dates_sorted,
                [self.date_to_upin_field[d][i, j] for d in self.dates_sorted])))
        upin_dict = {"UPIN": upin_dict}
        info.update(upin_dict)

        # gwdi_dict = dict(zip(self.dates_sorted,
        #     [self.date_to_gwdi_field[d][i,j] for d in self.dates_sorted]) )
        # gwdi_dict = {"GWDI": gwdi_dict}
        # info.update(gwdi_dict)

        swsr_dict = dict(list(zip(self.dates_sorted,
                [self.date_to_swsr_field[d][i, j] for d in self.dates_sorted])))
        swsr_dict = {"SWSR": swsr_dict}
        info.update(swsr_dict)

        swsl_dict = dict(list(zip(self.dates_sorted,
                [self.date_to_swsl_field[d][i, j] for d in self.dates_sorted])))
        swsl_dict = {"SWSL": swsl_dict}
        info.update(swsl_dict)

        stfl_dict = dict(list(zip(self.dates_sorted,
                [self.date_to_stfl_field[d][i, j] for d in self.dates_sorted])))
        stfl_dict = {"STFL": stfl_dict}
        info.update(stfl_dict)

        swst_dict = dict(list(zip(self.dates_sorted,
                [self.date_to_swst_field[d][i, j] for d in self.dates_sorted])))
        swst_dict = {"SWST": swst_dict}
        info.update(swst_dict)

        info["SBFM"] = self.data_manager.bankfull_storage_m3[i, j]
        info["CBF"] = self.data_manager.cbf[i, j]
        info["LKFR"] = self.data_manager.lake_fraction[i, j]
        info["LKAR"] = self.data_manager.lake_area[i, j]
        info["LKOU"] = self.lake_outlet[i, j]

        # pickle needs a binary file handle; mode="w" would fail on the byte stream
        pickle.dump(info, open(fName, mode="wb"))
        self.counter += 1
        plt.show()
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.current_pose = None
        self.current_pose_idx = None
        self.base_waypoints = None
        self.base_waypoints_2d = None
        self.base_waypoints_kdtree = None
        self.camera_image = None
        self.has_image = False
        self.all_traffic_lights = []

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        self.init_complete = False
        self.current_state = TLDetectorState.INIT

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier()
        self.listener = tf.TransformListener()

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)

        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray, self.traffic_cb)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint', Int32, queue_size=1)

        self.run_tl_detector_state_machine()
        # rospy.spin()

    def run_tl_detector_state_machine(self):
        rate = rospy.Rate(TL_DETECTOR_RATE_HZ)
        # loop the state machine while the system is running
        while not rospy.is_shutdown():
            if self.current_state == TLDetectorState.INIT:
                # ensure that the initialization is complete before exiting this state
                # rospy.loginfo("TLDetectorState.INIT ... ")
                self.init_complete = (self.base_waypoints and self.current_pose and self.all_traffic_lights)
                if self.init_complete:
                    # rospy.loginfo("TLDetectorState initialization complete...")
                    # rospy.loginfo("TLDetectorState switching state to RUN...")
                    self.current_state = TLDetectorState.RUN
            elif self.current_state == TLDetectorState.RUN:
                # rospy.loginfo("TLDetectorState.RUN ... ")
                # if initialization is complete, publish the final waypoints for this cycle
                if self.init_complete:
                    # TODO: Ashish uncomment camera image check
                    # check whether we have an image; sleep before skipping the cycle,
                    # otherwise a bare `continue` bypasses rate.sleep() and busy-waits
                    if not self.has_image or not self.camera_image:
                        rate.sleep()
                        continue

                    stop_line_wp_idx, state = self.process_traffic_lights()
                    '''
                    Publish upcoming red lights at camera frequency.
                    Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
                    of times till we start using it. Otherwise the previous stable state is
                    used.
                    '''
                    if self.state != state:
                        self.state_count = 0
                        self.state = state
                    elif self.state_count >= STATE_COUNT_THRESHOLD:
                        self.last_state = self.state
                        stop_line_wp_idx = stop_line_wp_idx if state == TrafficLight.RED else -1
                        self.last_wp = stop_line_wp_idx
                        self.upcoming_red_light_pub.publish(Int32(stop_line_wp_idx))
                    else:
                        self.upcoming_red_light_pub.publish(Int32(self.last_wp))
                    self.state_count += 1
                    # rospy.loginfo("TL_DETECTOR: Car WP: {0} ** Closest light wp: {1} ** light state: {2}".format(
                    #     self.current_pose_idx, stop_line_wp_idx, state))
                else:
                    # switch back to the default state
                    self.current_state = TLDetectorState.INIT
            rate.sleep()

    def pose_cb(self, msg):
        self.current_pose = msg
        # also get the current car position index
        if self.current_pose and self.base_waypoints_2d:
            x = self.current_pose.pose.position.x
            y = self.current_pose.pose.position.y
            self.current_pose_idx = self.get_closest_waypoint_index(x, y)

    def waypoints_cb(self, msg):
        # rospy.loginfo("TL_DETECTOR: waypoints_cb called...")
        self.base_waypoints = msg.waypoints
        if self.base_waypoints is not None:
            # populate the Lane msg header as well (not really required, but kept for consistency)
            # self.lane_msg_header = msg.header
            # get the 2d (x, y) waypoints
            self.base_waypoints_2d = [[wp.pose.pose.position.x, wp.pose.pose.position.y]
                                      for wp in self.base_waypoints]
            # build a KDTree for efficient search of the nearest waypoint
            self.base_waypoints_kdtree = KDTree(self.base_waypoints_2d)
            # rospy.loginfo("TL_DETECTOR: Base waypoints loaded...")

    def traffic_cb(self, msg):
        self.all_traffic_lights = msg.lights

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
        of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        # rospy.loginfo("TL_DETECTOR: image_cb called...")
        self.has_image = True
        # keep the whole Image message: cv_bridge.imgmsg_to_cv2() expects the
        # message itself, not its raw `data` buffer
        self.camera_image = msg

        ## TODO: Ashish - remove - moved to TL Detector state machine
        # light_wp, state = self.process_traffic_lights()
        # # rospy.loginfo("Closest light wp: {0} \t light state: {1}".format(light_wp, state))
        #
        # '''
        # Publish upcoming red lights at camera frequency.
        # Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
        # of times till we start using it. Otherwise the previous stable state is
        # used.
        # '''
        # if self.state != state:
        #     self.state_count = 0
        #     self.state = state
        # elif self.state_count >= STATE_COUNT_THRESHOLD:
        #     self.last_state = self.state
        #     light_wp = light_wp if state == TrafficLight.RED else -1
        #     self.last_wp = light_wp
        #     self.upcoming_red_light_pub.publish(Int32(light_wp))
        # else:
        #     self.upcoming_red_light_pub.publish(Int32(self.last_wp))
        # self.state_count += 1

    def get_closest_waypoint_index(self, x, y, ahead=True):
        '''
        Returns the index of the closest waypoint ahead of or behind the x, y position
        :param x: x coordinate of the position
        :param y: y coordinate of the position
        :param ahead: if True, return the waypoint ahead of the position, else the one behind
        :return: index of the closest waypoint
        '''
        # Note: here we need to check to ensure that the waypoint is before the pose, as the pose
        # is going to be the position of the traffic light stop line, and we need to stop before
        # the line in case it is a red light

        # query for the single nearest point to x, y
        # idx 0 contains the distance to the closest point, idx 1 has the closest point index
        closest_idx = self.base_waypoints_kdtree.query([x, y], 1)[1]

        # check whether the closest point is ahead or behind
        closest_waypoint_coord = self.base_waypoints_2d[closest_idx]
        prev_waypoint_coord = self.base_waypoints_2d[closest_idx - 1]

        # equation of the hyperplane through the closest waypoint coordinates
        cl_vect = np.array(closest_waypoint_coord)
        prev_vect = np.array(prev_waypoint_coord)
        curr_pos_vect = np.array([x, y])

        # this dot product will be positive if the closest point is behind the position,
        # and negative if it is ahead of the position
        val = np.dot((cl_vect - prev_vect), (curr_pos_vect - cl_vect))

        # if we want the closest ahead and the wp is behind the position, get the next wp
        if ahead and val > 0:
            closest_idx = (closest_idx + 1) % len(self.base_waypoints_2d)
        # if we want the closest behind and the wp is ahead of the position, get the previous wp
        if not ahead and val < 0:
            closest_idx = closest_idx - 1  # an idx of -1 is fine, python wraps back to the last array element

        return closest_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        # for testing, just return the light state for now!
        return light.state  # TODO: ASHISH USE THE TL_CLASSIFIER HERE!!
        # if(not self.has_image):
        #     self.prev_light_loc = None
        #     return False
        #
        # cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "bgr8")
        #
        # #Get classification
        # return self.light_classifier.get_classification(cv_image)

    def process_traffic_lights(self):
        """Finds the closest visible traffic light, if one exists, and determines its
        location and color

        Returns:
            int: index of the waypoint closest to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        # light = None
        closest_light = None
        stop_line_wp_idx = -1

        # List of positions that correspond to the line to stop in front of for a given intersection
        stop_line_positions = self.config['stop_line_positions']
        # explicit None check: waypoint index 0 is a valid car position and must not be skipped
        if self.current_pose_idx is not None:
            # # car_position = self.get_closest_waypoint(self.current_pose.pose)
            # x = self.current_pose.pose.position.x
            # y = self.current_pose.pose.position.y
            # self.current_pose_idx = self.get_closest_waypoint_index(x, y)

            # TODO find the closest visible traffic light (if one exists)
            # we need to find the closest traffic light location, so start with the max difference
            # in distance in terms of the waypoint indices
            diff = len(self.base_waypoints)
            # find the closest traffic light (i.e. stop line of the traffic light) from the list of all traffic lights
            for i, traffic_light in enumerate(self.all_traffic_lights):
                # find the closest stop line waypoint index
                tl_stop_line = stop_line_positions[i]
                temp_wp_idx = self.get_closest_waypoint_index(tl_stop_line[0], tl_stop_line[1], ahead=False)
                d = temp_wp_idx - self.current_pose_idx
                if 0 <= d < diff:
                    diff = d
                    closest_light = traffic_light
                    stop_line_wp_idx = temp_wp_idx

        if closest_light:
            state = self.get_light_state(closest_light)
            return stop_line_wp_idx, state

        return stop_line_wp_idx, TrafficLight.UNKNOWN
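# --- A minimal, self-contained sketch of the nearest-waypoint-ahead pattern used by
# TLDetector.get_closest_waypoint_index above, assuming KDTree is scipy.spatial.KDTree
# (as the query signatures throughout this file suggest). The toy track and query
# coordinates are invented; only the k=1 query plus the dot-product "ahead/behind"
# test mirror the class.
import numpy as np
from scipy.spatial import KDTree

waypoints_2d = [[float(i), 0.5 * float(i)] for i in range(100)]  # toy straight track
tree = KDTree(waypoints_2d)

def closest_waypoint_ahead(x, y):
    closest_idx = tree.query([x, y], k=1)[1]
    cl_vect = np.array(waypoints_2d[closest_idx])
    prev_vect = np.array(waypoints_2d[closest_idx - 1])
    pos_vect = np.array([x, y])
    # positive dot product => the nearest waypoint lies behind the query position
    if np.dot(cl_vect - prev_vect, pos_vect - cl_vect) > 0:
        closest_idx = (closest_idx + 1) % len(waypoints_2d)
    return closest_idx

print(closest_waypoint_ahead(10.2, 5.0))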
class TimeseriesPlotter: def __init__(self, name_to_date_to_field, basemap, lons2d, lats2d, ax=None, cell_area=None, cell_manager=None, data_manager=None): self.gwdi_mean_field = None self.traf_mean_field = None self.tdra_mean_field = None self.upin_mean_field = None self.basemap = basemap self.date_to_stfl_field = name_to_date_to_field["STFL"] self.date_to_traf_field = name_to_date_to_field["TRAF"] self.date_to_tdra_field = name_to_date_to_field["TDRA"] self.date_to_pr_field = name_to_date_to_field["PR"] self.date_to_swe_field = name_to_date_to_field["I5"] self.date_to_swst_field = name_to_date_to_field["SWST"] #self.date_to_imav_field = name_to_date_to_field["IMAV"] self.acc_area_km2 = name_to_date_to_field["FACC"] #:type : CellManager self.cell_manager = cell_manager assert isinstance(self.cell_manager, CellManager) self.cell_area = cell_area x, y, z = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten()) self.kdtree = KDTree(list(zip(x, y, z))) ax.figure.canvas.mpl_connect("button_press_event", self) self.ax = ax self.lons2d = lons2d self.lats2d = lats2d self.data_manager = data_manager assert isinstance(self.data_manager, Crcm5ModelDataManager) self.x_pr, self.y_pr = basemap(lons2d, lats2d) self.lons_flat = lons2d.flatten() self.lats_flat = lats2d.flatten() self.dates_sorted = list( sorted(list(name_to_date_to_field.items())[0][1].keys())) self.counter = 0 self.date_to_swsr_field = name_to_date_to_field["SWSR"] self.date_to_swsl_field = name_to_date_to_field["SWSL"] #self.date_to_gwdi_field = name_to_date_to_field["GWDI"] self.date_to_upin_field = name_to_date_to_field["UPIN"] #static fields self.slope = name_to_date_to_field["SLOP"] self.channel_length = name_to_date_to_field["LENG"] self.lake_outlet = name_to_date_to_field["LKOU"] self.coef_bf = -np.ones(self.slope.shape) good_points = self.slope >= 0 self.coef_bf[good_points] = (self.slope[good_points])**0.5 / ( (self.channel_length[good_points])**(4.0 / 3.0) * data_manager.manning_bf[good_points]) def _get_closest_ij(self, event): lon, lat = self.basemap(event.xdata, event.ydata, inverse=True) x0, y0, z0 = lat_lon.lon_lat_to_cartesian(lon, lat) dist, i = self.kdtree.query((x0, y0, z0)) lon0, lat0 = self.lons_flat[i], self.lats_flat[i] ind = np.where((self.lons2d == lon0) & (self.lats2d == lat0)) ix = ind[0][0] jy = ind[1][0] return ix, jy def __call__(self, event): if event.button != 3: return i, j = self._get_closest_ij(event) vals = [self.date_to_stfl_field[d][i, j] for d in self.dates_sorted] plt.figure() plt.plot(self.dates_sorted, vals, label="STFL") mask = self.cell_manager.get_mask_of_cells_connected_with( self.cell_manager.cells[i][j]) print("sum(mask) = ", np.sum(mask)) vals1 = [ np.sum(self.date_to_traf_field[d][mask == 1]) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals1, label="TRAF") vals2 = [ np.sum(self.date_to_tdra_field[d][mask == 1]) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals2, label="TDRA") vals3 = [ np.sum(self.date_to_pr_field[d][mask == 1]) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals3, label="PR") #vals4 = [ # np.sum( self.date_to_gwdi_field[d][mask == 1] ) for d in self.dates_sorted #] #plt.plot(self.dates_sorted, vals4, label = "GWDI") vals5 = [ np.sum(self.date_to_upin_field[d][i, j]) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals5, label="UPIN") if self.upin_mean_field is None: self.upin_mean_field = np.mean(list( self.date_to_upin_field.values()), axis=0) plt.legend() plt.title("{0}: acc={1} km**2".format(self.counter, 
self.acc_area_km2[i, j])) # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.upin_mean_field) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("min-max: {0};{1}".format(to_plot_2d.min(), to_plot_2d.max())) # # self.ax.annotate(str(self.counter), (event.xdata, event.ydata), font_properties = # FontProperties(size=10), bbox=dict(boxstyle="round", fc="w")) # self.ax.redraw_in_frame() # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.data_manager.cbf) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # # # plt.title("CBF, {0:g}: v= {1}, min={2}, max={3}".format(self.counter, to_plot_2d[i,j], to_plot_2d.min(), to_plot_2d.max())) # # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.data_manager.bankfull_storage_m3) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("STBM, {0}: v= {1}".format(self.counter, to_plot_2d[i,j])) # # # plt.figure() # ax1 = plt.gca() # mbf = self.data_manager.manning_bf # to_plot_2d = np.ma.masked_where(mask < 0.5, mbf) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("MABF, {0}: v= {1}, min={2}, max={3}".format(self.counter, to_plot_2d[i,j], to_plot_2d.min(), to_plot_2d.max())) # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.slope) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("SLOPe, {0}: v= {1}, min={2}, max={3}".format(self.counter, to_plot_2d[i,j], to_plot_2d.min(), to_plot_2d.max())) # # # # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.data_manager.lake_area) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("lake area, {0}: v= {1}".format(self.counter, to_plot_2d[i,j])) # # plt.figure() # ax1 = plt.gca() # to_plot_2d = np.ma.masked_where(mask < 0.5, self.coef_bf) # img = self.basemap.pcolormesh(self.x_pr, self.y_pr, to_plot_2d, ax = ax1) # plt.colorbar(img, ax = ax1) # self.basemap.drawcoastlines(ax = ax1) # plt.title("coef_bf, {0}: v= {1:.1g}, min={2:.1g}, max={3:.1g}".format(self.counter, to_plot_2d[i,j], to_plot_2d.min(), to_plot_2d.max())) # plt.figure() #snow vals6 = [ np.sum(self.date_to_swe_field[d][mask == 1]) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals6, label="SWE") vals4 = [ np.sum(self.date_to_swe_field[d][i, j]) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals4, label="GWST") vals5 = [ np.sum(self.date_to_swsr_field[d][i, j]) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals5, label="SWSR") vals5 = [ np.sum(self.date_to_swsl_field[d][i, j]) for d in self.dates_sorted ] plt.plot(self.dates_sorted, vals5, label="SWSL") plt.legend() plt.title("{0}, lkfr = {1}".format( self.counter, self.data_manager.lake_fraction[i, j])) fName = "route_params_{0}_{1}.bin".format(i, j) info = {} #traf -> dict( date -> value in m**3/s ) traf_dict = dict( list( zip(self.dates_sorted, [ self.date_to_traf_field[d][i, j] for d 
                                                  in self.dates_sorted])))
        traf_dict = {"TRAF": traf_dict}
        info.update(traf_dict)

        upin_dict = dict(list(zip(self.dates_sorted,
                                  [self.date_to_upin_field[d][i, j] for d in self.dates_sorted])))
        upin_dict = {"UPIN": upin_dict}
        info.update(upin_dict)

        # gwdi_dict = dict(zip(self.dates_sorted,
        #                      [self.date_to_gwdi_field[d][i,j] for d in self.dates_sorted]) )
        # gwdi_dict = {"GWDI": gwdi_dict}
        # info.update(gwdi_dict)

        swsr_dict = dict(list(zip(self.dates_sorted,
                                  [self.date_to_swsr_field[d][i, j] for d in self.dates_sorted])))
        swsr_dict = {"SWSR": swsr_dict}
        info.update(swsr_dict)

        swsl_dict = dict(list(zip(self.dates_sorted,
                                  [self.date_to_swsl_field[d][i, j] for d in self.dates_sorted])))
        swsl_dict = {"SWSL": swsl_dict}
        info.update(swsl_dict)

        stfl_dict = dict(list(zip(self.dates_sorted,
                                  [self.date_to_stfl_field[d][i, j] for d in self.dates_sorted])))
        stfl_dict = {"STFL": stfl_dict}
        info.update(stfl_dict)

        swst_dict = dict(list(zip(self.dates_sorted,
                                  [self.date_to_swst_field[d][i, j] for d in self.dates_sorted])))
        swst_dict = {"SWST": swst_dict}
        info.update(swst_dict)

        info["SBFM"] = self.data_manager.bankfull_storage_m3[i, j]
        info["CBF"] = self.data_manager.cbf[i, j]
        info["LKFR"] = self.data_manager.lake_fraction[i, j]
        info["LKAR"] = self.data_manager.lake_area[i, j]
        info["LKOU"] = self.lake_outlet[i, j]

        # pickle needs a binary-mode file handle ("w" breaks under Python 3)
        pickle.dump(info, open(fName, mode="wb"))

        self.counter += 1
        plt.show()
        pass
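# --- Shape of the file TimeseriesPlotter.__call__ writes above: a dict of variable
# name -> {date: value} (plus a few scalars), pickled in binary mode as fixed above.
# A toy round trip with made-up dates and values:
import pickle
from datetime import datetime

dates = [datetime(2000, 1, d) for d in (1, 2, 3)]
info = {"TRAF": {d: 0.1 * i for i, d in enumerate(dates)},
        "LKFR": 0.25}
with open("route_params_0_0.bin", "wb") as f:   # binary mode is required by pickle
    pickle.dump(info, f)
with open("route_params_0_0.bin", "rb") as f:
    print(pickle.load(f)["TRAF"][dates[1]])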
class RealWorldMap(object):
    def __init__(self, patches, projections):
        self.patches = patches
        self.projections = projections
        self.build()

    def build(self):
        logger.debug("Building maps for conversion between real and image space")
        self.imagetree = {}
        self.imagemap = {}
        self.realtree = KDTree([x.realcoords for x in self.patches])
        self.realmapping = {}
        for num, patch in enumerate(self.patches):
            if num % 1000 == 0:
                logger.debug("Built maps for {0} of {1} patches".format(num, len(self.patches)))
            resp = {}
            for _, projection in self.projections.items():
                inimage = patch.project(projection)
                if projection.id not in self.imagetree:
                    self.imagetree[projection.id] = []
                    self.imagemap[projection.id] = {}
                self.imagetree[projection.id].append(inimage)
                self.imagemap[projection.id][tuple(inimage)] = patch.realcoords
                resp[projection.id] = inimage
            self.realmapping[tuple(patch.realcoords)] = resp
        logger.debug("Done building maps for {0} patches".format(len(self.patches)))
        logger.debug("Building image KD tree")
        for key, imagecoords in self.imagetree.items():
            self.imagetree[key] = KDTree(imagecoords)

    def realtoimages(self, coords):
        _, nearestindex = self.realtree.query(coords)
        nearest = self.realtree.data[nearestindex]
        return self.realmapping[tuple(nearest)]

    def realregiontoimages(self, coords):
        _, nearestindices = self.realtree.query(coords)
        resp = {}
        for nearestindex in nearestindices:
            nearest = self.realtree.data[nearestindex]
            points = self.realmapping[tuple(nearest)]
            for k, v in points.items():  # items(), not the Python 2-only iteritems()
                if k not in resp:
                    resp[k] = []
                resp[k].append(v)
        return resp

    def imagetoreal(self, projection, coords):
        try:
            projection = projection.id
        except AttributeError:  # already a plain id; a bare except would mask real errors
            pass
        _, nearestindex = self.imagetree[projection].query(coords)
        nearest = self.imagetree[projection].data[nearestindex]
        return self.imagemap[projection][tuple(nearest)]
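# --- A minimal sketch of RealWorldMap's two-way lookup: snap a query to the nearest
# known real-world patch via one KDTree, then read off its image-space projection
# from a dict keyed by the stored coordinates. All coordinates below are made up;
# the dict-keyed-by-tuple(tree.data[i]) trick is the part taken from the class.
import numpy as np
from scipy.spatial import KDTree

real = np.random.rand(50, 3)          # known patch positions (world space)
image = np.random.rand(50, 2) * 640   # their projections into one hypothetical camera

real_tree = KDTree(real)
real_to_image = {tuple(r): im for r, im in zip(real, image)}

def realtoimage(coords):
    _, i = real_tree.query(coords)
    return real_to_image[tuple(real_tree.data[i])]

print(realtoimage([0.5, 0.5, 0.5]))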
def convert(inPath, lonlats):
    ds = gdal.Open(inPath, gdal.GA_ReadOnly)
    assert isinstance(ds, Dataset)

    # GDAL geotransform: (x_ul, dx, row rotation, y_ul, column rotation, dy);
    # the original unpacked both rotation terms into a single reused name
    (Xul, deltaX, rotation_x, Yul, rotation_y, deltaY) = ds.GetGeoTransform()
    print(dir(ds))
    print(ds.GetMetadata_Dict())
    print(ds.GetDescription())

    srs_wkt = ds.GetProjection()
    Nx = ds.RasterXSize
    Ny = ds.RasterYSize
    print(ds.RasterCount)

    # integer division: ReadAsArray expects int sizes (Nx / 2 is a float in Python 3)
    nxToRead = Nx // 2
    nyToRead = int(Ny / 1.5)
    data = ds.GetRasterBand(1).ReadAsArray(0, 0, nxToRead, nyToRead).transpose()
    print(srs_wkt)
    print(data.shape)

    #plt.imshow(data)
    #plt.show()
    ds = None
    print(Xul, Yul, deltaX, deltaY, rotation_x)

    x1d = np.arange(Xul, Xul + deltaX * nxToRead, deltaX)
    y1d = np.arange(Yul, Yul + deltaY * nyToRead, deltaY)
    assert len(x1d) == nxToRead
    assert len(y1d) == nyToRead
    y, x = np.meshgrid(y1d, x1d)

    fieldName = os.path.basename(inPath).split("_")[0].lower()
    coef = name_to_mult[fieldName]
    no_data = name_to_nodata_value[fieldName]
    usable = (data != no_data)
    print(x.shape, usable.shape)
    x0 = x[usable]
    y0 = y[usable]
    cartx, carty, cartz = lat_lon.lon_lat_to_cartesian(x0, y0)
    data_1d = data[usable]
    print("useful data points : {0}".format(len(x0)))

    tree = KDTree(list(zip(cartx, carty, cartz)))
    print("constructed the kdtree")

    xi, yi, zi = lat_lon.lon_lat_to_cartesian(lonlats[:, 0], lonlats[:, 1])
    dists, inds = tree.query(list(zip(xi, yi, zi)), k=AGGR_SIZE)

    npoints = dists.shape[0]
    interp_data = np.zeros((npoints, ))
    for i in range(npoints):
        the_dists = dists[i, :]
        the_inds = inds[i, :]

        good_pts = (the_dists < LIMIT_DIST)
        if len(the_dists[good_pts]) < 0.25 * AGGR_SIZE:
            # too few usable points in the vicinity, flag the value as no-data
            interp_data[i] = -1
            continue
        the_dists = the_dists[good_pts]
        the_inds = the_inds[good_pts]

        interp_coefs = 1.0 / the_dists ** 2
        interp_data[i] = np.sum(interp_coefs * data_1d[the_inds]) / np.sum(interp_coefs)

    interp_data[interp_data >= 0] *= coef
    print("completed interpolation")
    return interp_data
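# --- Sketch of the inverse-distance-weighted aggregation convert() performs per
# target point, vectorized over all points at once. A synthetic field on random
# 2-D points stands in for the raster; k=8 replaces AGGR_SIZE and there is no
# distance cut-off, so this only illustrates the weighting, not the no-data logic.
import numpy as np
from scipy.spatial import KDTree

src_xy = np.random.rand(1000, 2)
src_val = np.sin(src_xy[:, 0] * 10.0)
tgt_xy = np.random.rand(200, 2)

tree = KDTree(src_xy)
dists, inds = tree.query(tgt_xy, k=8)
weights = 1.0 / np.maximum(dists, 1e-12) ** 2        # guard against d == 0
interp = np.sum(weights * src_val[inds], axis=1) / np.sum(weights, axis=1)
print(interp.shape)  # (200,)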
class SoundingPlotter: def __init__(self, ax , basemap, tmin_3d, tmax_3d, lons2d, lats2d, levelheights = None): """ Plots a vertical profile at the point nearest to the clicked one :type ax: Axes """ assert isinstance(ax, Axes) self.basemap = basemap assert isinstance(self.basemap, Basemap) self.tmin_3d = tmin_3d self.tmax_3d = tmax_3d self.lons2d = lons2d self.lats2d = lats2d self.T0 = 273.15 self.lons_flat = lons2d.flatten() self.lats_flat = lats2d.flatten() self.level_heights = levelheights self.counter = 0 self.ax = ax x, y, z = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten()) self.kdtree = KDTree(list(zip(x,y,z))) ax.figure.canvas.mpl_connect("button_press_event", self) pass def _get_closest_ij(self, event): lon, lat = self.basemap(event.xdata, event.ydata, inverse = True) x0, y0, z0 = lat_lon.lon_lat_to_cartesian(lon, lat) dist, i = self.kdtree.query((x0,y0,z0)) lon0, lat0 = self.lons_flat[i], self.lats_flat[i] ind = np.where((self.lons2d == lon0) & (self.lats2d == lat0)) ix = ind[0][0] jy = ind[1][0] return ix, jy def _plot_sounding(self, ax, ix, jy): ax.plot(self.tmax_3d[ix, jy, :] - self.T0, self.level_heights, color = "r") ax.plot(self.tmin_3d[ix, jy, :] - self.T0, self.level_heights, color = "b") ax.plot([0 , 0], [self.level_heights[0], self.level_heights[-1]], color = "k") ax.set_title(str(self.counter)) assert isinstance(ax, Axes) ax.invert_yaxis() def __call__(self, event): print(event.xdata, event.ydata) print(event.button) if event.button != 3: return ix, jy = self._get_closest_ij(event) fig = plt.figure() sounding_ax = fig.add_subplot(1,1,1) self._plot_sounding(sounding_ax, ix, jy) self.ax.annotate(str(self.counter), (event.xdata, event.ydata), font_properties = FontProperties(size=10)) self.ax.redraw_in_frame() self.counter += 1 assert isinstance(fig, Figure) plt.show() pass
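# --- The _get_closest_ij pattern (used by SoundingPlotter above and by several of
# the plotter classes below), reduced to its essentials: flatten a 2-D lon/lat grid,
# build a KDTree on unit-sphere Cartesian coordinates, and map any lon/lat back to
# grid indices. lon_lat_to_cartesian is reimplemented here (unit radius assumed) so
# the sketch is self-contained; np.unravel_index replaces the np.where value scan.
import numpy as np
from scipy.spatial import KDTree

def lon_lat_to_cartesian(lon, lat):
    lon, lat = np.radians(lon), np.radians(lat)
    return np.cos(lat) * np.cos(lon), np.cos(lat) * np.sin(lon), np.sin(lat)

lons2d, lats2d = np.meshgrid(np.linspace(-80, -60, 50), np.linspace(40, 60, 40))
x, y, z = lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten())
tree = KDTree(list(zip(x, y, z)))

def closest_ij(lon, lat):
    _, i = tree.query(lon_lat_to_cartesian(lon, lat))
    # flatten() is row-major, so the flat index maps straight back to (i, j)
    return np.unravel_index(i, lons2d.shape)

print(closest_ij(-70.0, 50.0))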
def main():
    start_year = 1970
    end_year = 1999

    stations = cehq_station.read_station_data(folder="data/cehq_measure_data_all")
    stations = list(filter(lambda s: s.is_natural, stations))  # itertools.ifilter is Python 2-only
    for s in stations:
        s.delete_data_after_year(end_year)
        s.delete_data_before_year(start_year)
        pass

    stations = list(filter(lambda s: s.get_num_of_years_with_continuous_data() >= 10, stations))
    s = stations[0]
    assert isinstance(s, Station)

    #stations = list( filter(lambda s: s.is_natural, stations) )
    x, y = polar_stereographic.lons, polar_stereographic.lats
    basemap = polar_stereographic.basemap
    x, y = basemap(x, y)

    sx = [s.longitude for s in stations]
    sy = [s.latitude for s in stations]
    sx, sy = basemap(sx, sy)

    #read model data
    model_file_path = "data/streamflows/hydrosheds_euler9/aex_discharge_1970_01_01_00_00.nc"
    acc_area = data_select.get_field_from_file(path=model_file_path, field_name="drainage")

    i_indices, j_indices = data_select.get_indices_from_file(path=model_file_path)
    lons_1d = data_select.get_field_from_file(path=model_file_path, field_name="longitude")
    lats_1d = data_select.get_field_from_file(path=model_file_path, field_name="latitude")

    x1d, y1d, z1d = lat_lon.lon_lat_to_cartesian(lons_1d, lats_1d)
    kdtree = KDTree(list(zip(x1d, y1d, z1d)))  # materialize: zip() is lazy in Python 3

    print("Id: 4 DA (km2) <-> 4 dist (km) <-> 4 (i,j)")
    #basemap.scatter(sx, sy, c = "r", zorder = 5)
    for s, isx, isy in zip(stations, sx, sy):
        assert isinstance(s, Station)
        plt.annotate(s.id, xy=(isx, isy), bbox=dict(facecolor='white'), weight="bold",
                     font_properties=FontProperties(size=0.5))

        #get model drainage areas for the four closest gridcells to the station
        x0, y0, z0 = lat_lon.lon_lat_to_cartesian(s.longitude, s.latitude)
        dists, indices = kdtree.query([x0, y0, z0], k=4)
        dists /= 1000
        print("{0}: {1:.1f}; {2:.1f}; {3:.1f}; {4:.1f} <-> {5:.1f}; {6:.1f}; {7:.1f}; {8:.1f} <-> {9};{10};{11};{12}".format(
            "{0} (S_DA = {1:.1f})".format(s.id, s.drainage_km2),
            float(acc_area[indices[0]]), float(acc_area[indices[1]]),
            float(acc_area[indices[2]]), float(acc_area[indices[3]]),
            float(dists[0]), float(dists[1]), float(dists[2]), float(dists[3]),
            "({0}, {1})".format(i_indices[indices[0]] + 1, j_indices[indices[0]] + 1),
            "({0}, {1})".format(i_indices[indices[1]] + 1, j_indices[indices[1]] + 1),
            "({0}, {1})".format(i_indices[indices[2]] + 1, j_indices[indices[2]] + 1),
            "({0}, {1})".format(i_indices[indices[3]] + 1, j_indices[indices[3]] + 1)
        ))

    basemap.drawcoastlines(linewidth=0.5)

    xmin, xmax = min(sx), max(sx)
    ymin, ymax = min(sy), max(sy)
    marginx = (xmax - xmin) * 0.1
    marginy = (ymax - ymin) * 0.1
    xmin -= marginx * 1.5
    xmax += marginx * 2
    ymin -= marginy
    ymax += marginy * 2

    plt.xlim(xmin, xmax)
    plt.ylim(ymin, ymax)
    plt.tight_layout()
    basin_boundaries.plot_basin_boundaries_from_shape(basemap=basemap, plotter=plt, linewidth=1)
    plt.savefig("10yr_cont_stations_natural_fs0.5.pdf")
    #plt.show()
    pass
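# --- Sketch of the k-nearest-gridcell lookup main() performs per station: a single
# query with k=4 returns distances and flat indices together, and the indices go
# straight into the flattened model fields. The grid, field, and station position
# are random placeholders; the distance unit conversion is illustrative only.
import numpy as np
from scipy.spatial import KDTree

np.random.seed(0)
grid_xyz = np.random.rand(500, 3)        # stand-in for model cell centres
acc_area = np.random.rand(500) * 1e4     # stand-in for the drainage-area field

tree = KDTree(grid_xyz)
station_xyz = [0.5, 0.5, 0.5]
dists, indices = tree.query(station_xyz, k=4)
for d, i in zip(dists, indices):
    print("cell {0}: dist={1:.3f}, DA={2:.1f} km**2".format(i, d, acc_area[i]))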
filename = '02'
las = laspy.file.File(path + filename + '.las', mode='r')
coords = np.vstack((las.x, las.y, las.z)).transpose()
values = np.vstack((las.classification, las.intensity)).transpose()
tree = KDTree(coords)
time_delta_0 = datetime.datetime.now() - t0
print('Time read and tree {0}'.format(time_delta_0))

features = []
n = 0
t1 = datetime.datetime.now()
for point, value in zip(coords, values):
    print(point)  # Python 3 print function (the original used a bare print statement)
    [dist, i] = tree.query(point, k=100)
    #keep = dist < 2
    # eigh with eigvals_only=True returns ascending eigenvalues, hence the ordering
    [lambda_3, lambda_2, lambda_1] = scipy.linalg.eigh(np.cov(coords[i].transpose()),
                                                       eigvals_only=True)
    # Features
    linearity = (lambda_1 - lambda_2) / lambda_1
    planarity = (lambda_2 - lambda_3) / lambda_1
    scattering = lambda_3 / lambda_1
    omnivariance = scipy.special.cbrt(lambda_1 * lambda_2 * lambda_3)
    anisotropy = (lambda_1 - lambda_3) / lambda_1
    eigentropy = -(lambda_1 * np.log(lambda_1) + lambda_2 * np.log(lambda_2) +
                   lambda_3 * np.log(lambda_3))
    curvature = lambda_3 / (lambda_1 + lambda_2 + lambda_3)
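# --- One neighbourhood of the per-point feature loop above, in isolation: take the
# k nearest points, eigen-decompose their covariance, and derive a few of the usual
# shape descriptors. The random cloud replaces the .las data; only the
# query -> covariance -> eigh chain is taken from the loop.
import numpy as np
import scipy.linalg
from scipy.spatial import KDTree

cloud = np.random.rand(1000, 3)
tree = KDTree(cloud)
_, idx = tree.query(cloud[0], k=50)
# ascending order from eigh: l3 is the smallest eigenvalue, l1 the largest
l3, l2, l1 = scipy.linalg.eigh(np.cov(cloud[idx].T), eigvals_only=True)

linearity = (l1 - l2) / l1
planarity = (l2 - l3) / l1
scattering = l3 / l1
print(linearity, planarity, scattering)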
class GldasManager():
    def __init__(self, folder_path="/home/huziy/skynet3_exec1/gldas_data"):
        """
        Data access interface to the folder of netcdf files
        runoff units: kg/m^2/s = mm/s
        """
        self.data_folder = folder_path
        self.surface_rof_varname = "Qs_GDS0_SFC_ave4h"
        self.subsurface_rof_varname = "Qsb_GDS0_SFC_ave4h"
        self.date_format = "%m/%d/%Y (%H:%M)"
        self._init_date_to_path_dict()
        self._init_kd_tree()
        pass

    def plot_subsrof_ts(self, i=0, j=0):
        all_dates = list(sorted(self.date_to_path.keys()))
        vals = [self.get_field_for_date(x, var_name=self.subsurface_rof_varname)[i, j] for x in all_dates]
        vals1 = [self.get_field_for_date(x, var_name=self.surface_rof_varname)[i, j] for x in all_dates]
        print(min(vals), max(vals))
        dates_num = date2num(all_dates)
        print(min(dates_num), max(dates_num))

        import matplotlib.pyplot as plt
        plt.figure()
        plt.plot(dates_num, vals, label="subsurf rof")
        plt.plot(dates_num, vals1, label="surf rof")
        plt.legend()
        #plt.xticks(rotation='vertical')
        plt.show()

    def _init_date_to_path_dict(self):
        self.date_to_path = {}
        for fName in os.listdir(self.data_folder):
            if not fName.endswith(".nc"):
                continue  # regard only netcdf files
            path = os.path.join(self.data_folder, fName)
            ds = Dataset(path)
            srofVar = ds.variables[self.surface_rof_varname]
            date = datetime.strptime(srofVar.initial_time, self.date_format)
            self.date_to_path[date] = path
            ds.close()

    def _init_kd_tree(self):
        """
        Has to be called after self._init_date_to_path_dict
        """
        if not len(self.date_to_path):
            print("You should call {0} first".format("self._init_date_to_path_dict"))
            raise Exception()

        for d, path in self.date_to_path.items():
            ds = Dataset(path)
            lons1d = ds.variables["g0_lon_1"][:]
            lats1d = ds.variables["g0_lat_0"][:]
            self.lats2d, self.lons2d = np.meshgrid(lats1d, lons1d)
            x, y, z = lat_lon.lon_lat_to_cartesian(self.lons2d.flatten(), self.lats2d.flatten())
            self.kdtree = KDTree(list(zip(x, y, z)))
            return  # the grid is the same in all files, so one file is enough
        pass

    def get_field_for_month_and_year(self, var_name="", month=None, year=None):
        d1 = datetime(year=year, month=month, day=1)
        path = self.date_to_path[d1]
        ds = Dataset(path)
        return ds.variables[var_name][:]

    def get_field_for_date(self, the_date, var_name=""):
        path = self.date_to_path[the_date]
        ds = Dataset(path)
        data = ds.variables[var_name][:].transpose()  # transpose because I always use (lon, lat) order of coordinates
        ds.close()
        return data

    def get_srof_spat_integrals_over_points_in_time(self, lons2d_target, lats2d_target, mask, areas2d,
                                                    start_date=None, end_date=None):
        return self._get_spatial_integrals_over_points_in_time(lons2d_target, lats2d_target, mask, areas2d,
                                                               start_date=start_date, end_date=end_date,
                                                               var_name=self.surface_rof_varname)

    def get_subsrof_spat_integrals_over_points_in_time(self, lons2d_target, lats2d_target, mask, areas2d,
                                                       start_date=None, end_date=None):
        return self._get_spatial_integrals_over_points_in_time(lons2d_target, lats2d_target, mask, areas2d,
                                                               start_date=start_date, end_date=end_date,
                                                               var_name=self.subsurface_rof_varname)

    def _get_spatial_integrals_over_points_in_time(self, lons2d_target, lats2d_target, mask, areas2d,
                                                   start_date=None, end_date=None, var_name=""):
        """
        i) Interpolate to the grid (lons2d_target, lats2d_target)
        ii) Apply the mask to the interpolated fields and sum with coefficients from areas2d

        Note: the interpolation is done using the nearest neighbor approach

        returns a timeseries of {t -> sum(Ai[mask]*xi[mask])(t)}
        """
        #interpolation
        x1, y1, z1 = lat_lon.lon_lat_to_cartesian(lons2d_target.flatten(), lats2d_target.flatten())
        dists, indices = self.kdtree.query(list(zip(x1, y1, z1)))

        mask1d = mask.flatten().astype(int)
        areas1d = areas2d.flatten()

        result = {}
        for the_date in list(self.date_to_path.keys()):
            if start_date is not None:
                if start_date > the_date:
                    continue
            if end_date is not None:
                if end_date < the_date:
                    continue
            data = self.get_field_for_date(the_date, var_name=var_name)
            result[the_date] = np.sum(data.flatten()[indices][mask1d == 1] * areas1d[mask1d == 1])

        times = list(sorted(result.keys()))
        values = [result[x] for x in times]
        print("nvals, min, max", len(values), min(values), max(values))
        return TimeSeries(time=times, data=values)
def get_mri_surf2surf_matrix(source_subj, hemi, surface_type,
                             target_subj='fsaverage', subjects_dir=None,
                             n_neighbors=20, random_state=0,
                             n_test_images=40, coef_threshold=None,
                             renormalize=True):
    """Creates a matrix implementing freesurfer mri_surf2surf command.

    A surface-to-surface transform is a linear transform between vertex spaces.
    Such a transform must be highly localized in the sense that a vertex in the
    target surface only draws its values from very few source vertices.

    This function exploits the localization to create an inverse problem for
    each vertex. The source neighborhoods for each target vertex are found by
    using mri_surf2surf to transform the three coordinate maps from the source
    surface to the target surface, yielding three coordinate values for each
    target vertex, for which we find the nearest neighbors in the source space.

    A small number of test images is transformed from source surface to target
    surface. For each target vertex in the transformed test images, a
    regression is performed using only the corresponding source image
    neighborhood, yielding the entries for a sparse matrix encoding the
    transform.

    Parameters
    ==========
    source_subj: str
        Freesurfer name of source subject
    hemi: str in ("lh", "rh")
        Indicator for hemisphere
    surface_type: str in ("white", "pial", ...)
        Indicator for surface layer
    target_subj: str, default "fsaverage"
        Freesurfer name of target subject
    subjects_dir: str, default os.environ["SUBJECTS_DIR"]
        The freesurfer subjects directory
    n_neighbors: int, default 20
        The size of the neighborhood to take into account when estimating
        the source support of a vertex
    random_state: int, default 0
        Random number generator or seed for generating test images
    n_test_images: int, default 40
        Number of test images transformed to compute the inverse problem.
        This should be greater than or equal to n_neighbors.
    coef_threshold: float, default 1 / (10 * n_neighbors)
        Value under which to set a weight to zero in the inverse problem.
    renormalize: boolean, default True
        Determines whether the rows of the output matrix should add to 1,
        implementing what is sensible: a weighted averaging

    Notes
    =====
    It turns out that freesurfer seems to do the following: For each target
    vertex, find, on the sphere, the nearest source vertices, and average
    their values. Try to be as one-to-one as possible.
    """
    source_verts, _, _ = get_surf(source_subj, hemi, surface_type,
                                  freesurfer_subject_dir=subjects_dir)
    transformed_coords = mri_surf2surf(source_verts.T, source_subj,
                                       target_subj, hemi,
                                       subjects_dir=subjects_dir)
    kdt = KDTree(source_verts)
    print("Getting nearest neighbors")
    distances, indices = kdt.query(transformed_coords.T, k=n_neighbors)
    print("Done")

    rng = (np.random.RandomState(random_state)
           if isinstance(random_state, int) else random_state)

    test_images = rng.randn(n_test_images, len(source_verts))
    transformed_test_images = mri_surf2surf(test_images, source_subj,
                                            target_subj, hemi,
                                            subjects_dir=subjects_dir)

    # Solve linear problems to get coefficients
    all_coefs = []
    residuals = []
    print("Computing coefficients")
    i = 0
    for target_activation, source_inds in zip(transformed_test_images.T, indices):
        i += 1
        print("{i}".format(i=i), end="\r")
        source_values = test_images[:, source_inds]
        r = lstsq(source_values, target_activation,
                  overwrite_a=True, overwrite_b=True)
        all_coefs.append(r[0])
        residuals.append(r[1])
    print("Done")

    all_coefs = np.array(all_coefs)
    if coef_threshold is None:
        # we know now that coefs are doing averages
        coef_threshold = 1 / 10. / n_neighbors
    all_coefs[np.abs(all_coefs) < coef_threshold] = 0
    if renormalize:
        all_coefs /= np.abs(all_coefs).sum(axis=1)[:, np.newaxis] + 1e-10

    # there seem to be like 7 vertices that don't constitute an average over
    # 20 vertices or less, but all the others are such an average.

    # Let's make a matrix that does the transform:
    col_indices = indices.ravel()
    row_indices = (np.arange(indices.shape[0])[:, np.newaxis] *
                   np.ones(indices.shape[1], dtype='int')).ravel()
    data = all_coefs.ravel()
    shape = (transformed_coords.shape[1], source_verts.shape[0])
    matrix = coo_matrix((data, (row_indices, col_indices)), shape=shape)
    return matrix
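# --- Applying a matrix assembled the way get_mri_surf2surf_matrix does it: rows are
# target vertices, columns source vertices, so transforming any source image is one
# sparse mat-vec. Sizes, indices, and weights below are invented; only the coo_matrix
# assembly and the dot product mirror the function.
import numpy as np
from scipy.sparse import coo_matrix

n_target, n_source = 5, 8
rows = np.repeat(np.arange(n_target), 2)              # two source supports per row
cols = np.random.randint(0, n_source, size=rows.shape)
vals = np.full(rows.shape, 0.5)                       # two 0.5 weights -> an average
matrix = coo_matrix((vals, (rows, cols)), shape=(n_target, n_source)).tocsr()

source_image = np.random.randn(n_source)
target_image = matrix.dot(source_image)               # surf2surf in one mat-vec
print(target_image.shape)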
def plot_current_alts_nyear_rule(nyear = 2): start_year = 1981 end_year = 2008 sim_data_folder = "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/era40_driven_b1" sim_names = ["ERA40", "MPI","CanESM"] all_data_f = "/home/huziy/skynet1_rech3/cordex/for_Samira" simname_to_path = { "ERA40": os.path.join(all_data_f, "alt_era_b1_yearly.nc"), "MPI": os.path.join(all_data_f, "alt_mpi_b1_yearly.nc"), "CanESM": os.path.join(all_data_f, "alt_canesm_b1_yearly.nc") } coord_file = os.path.join(sim_data_folder, "pmNorthAmerica_0.44deg_ERA40-Int_B1_200812_moyenne") basemap, lons2d, lats2d = draw_regions.get_basemap_and_coords(resolution="c", file_path = coord_file, llcrnrlat=40.0, llcrnrlon=-145, urcrnrlon=-20, urcrnrlat=74 ) assert isinstance(basemap, Basemap) #basemap.transform_scalar() #basemap = Basemap() lons2d[lons2d > 180] -= 360 x, y = basemap(lons2d, lats2d) #x = (x[1:,1:] + x[:-1, :-1]) /2.0 permafrost_mask = draw_regions.get_permafrost_mask(lons2d, lats2d) mask_cond = (permafrost_mask <= 0) | (permafrost_mask >= 3) # plot_utils.apply_plot_params(width_pt=None, width_cm=20, height_cm=40, font_size=25) fig = plt.figure() assert isinstance(fig, Figure) h_max = 10 cmap = my_colormaps.get_lighter_jet_cmap(ncolors=10) #cm.get_cmap("jet",10) bounds = [0,0.1,0.5,1,2,3,5,8,9,10,11] norm = BoundaryNorm(boundaries=bounds,ncolors=len(bounds), clip=True) cmap.set_over(cmap(1.0)) clevels = np.arange(0,h_max+1,1) gs = gridspec.GridSpec(3,1) all_axes = [] all_img = [] i = 0 hc_list = [] hct_list = [] for name in sim_names: path = simname_to_path[name] #select data and needed alt ds = Dataset(path) years = ds.variables["year"][:] hct = ds.variables["alt"][(years >= start_year) & (years <= end_year),:,:] hct_list.append(hct) print("hct.shape = ", hct.shape) #hc = get_alt_using_nyear_rule(hct, nyears = nyear) hc = np.mean(hct, axis = 0) hc_list.append(hc) ax = fig.add_subplot(gs[i,0]) assert isinstance(ax, Axes) hc = np.ma.masked_where(mask_cond | (np.min(hct, axis = 0) < 0), hc) #hc = np.ma.masked_where( (hc < 0), hc) img = basemap.pcolormesh(x, y, hc, cmap = cmap, vmax = h_max, norm=norm) if not i: ax.set_title("ALT, mean ({0} - {1}) \n".format(start_year, end_year)) i += 1 ax.set_ylabel(name) all_axes.append(ax) all_img.append(img) i = 0 axs_to_hide = [] #zones and coastlines for the_ax, the_img in zip(all_axes, all_img): assert isinstance(the_ax, Axes) basemap.drawcoastlines(ax = the_ax, linewidth=0.5) basemap.readshapefile("data/pf_4/permafrost8_wgs84/permaice", name="zone", ax=the_ax, linewidth=1.5) divider = make_axes_locatable(the_ax) cax = divider.append_axes("right", "5%", pad="3%") cb = fig.colorbar(the_img, cax = cax, extend = "max", ticks = bounds) cax.set_title("m \n") if i != 2: axs_to_hide.append(cax) i += 1 fig.tight_layout(w_pad=0.0) for the_ax in axs_to_hide: the_ax.set_visible(False) fig.savefig("alt_mean_current.png") #print ALT for selected points site_names = ["S","K","T"] sel_lons = [-75.646, -65.92, -69.95] sel_lats = [62.197, 58.709, 58.67] xo,yo,zo = lat_lon.lon_lat_to_cartesian(sel_lons, sel_lats) xi, yi, zi = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten()) ktree = KDTree(list(zip(xi,yi,zi))) dists, indexes = ktree.query(list(zip(xo,yo,zo))) for name, data, the_hct in zip(sim_names, hc_list, hct_list): print(name) flat_data = data.flatten() for p_name, ind in zip(site_names, indexes): in_data = [] for t in range(the_hct.shape[0]): in_data.append(the_hct[t,:,:].flatten()[ind]) print(",".join(["{0:.1f}".format(float(x)) for x in in_data])) print(p_name, "{0:.1f} 
m".format(float(flat_data[ind]))) print("--" * 10)
labeltag.ego_agent_pose.frame = FrameId_pb2.GLOBAL
labeltag.ego_agent_pose.session_time = tsamp[0]
labeltag.ego_agent_linear_velocity.vector.CopyFrom(
    proto_utils.vectorFromNumpy(vglobalsamp[0]))
labeltag.ego_agent_linear_velocity.frame = FrameId_pb2.GLOBAL
labeltag.ego_agent_linear_velocity.session_time = tsamp[0]
labeltag.ego_agent_angular_velocity.vector.CopyFrom(
    proto_utils.vectorFromNumpy(angvelsglobal[0]))
# set the frame on the angular velocity (the original set the linear velocity's
# frame a second time here, an apparent copy-paste slip)
labeltag.ego_agent_angular_velocity.frame = FrameId_pb2.GLOBAL
labeltag.ego_agent_angular_velocity.session_time = tsamp[0]
carspinspeed = np.linalg.norm(angvelsglobal[0])
_, iclosest = racelinekdtree.query(carpose[0:3, 3])
istart = (iclosest - int(racelinebuff / 3)) % raceline.shape[0]
while np.dot(raceline[istart] - carpose[0:3, 3], carpose[0:3, 0]) < -0.05:
    istart = istart + 1
istart = istart % raceline.shape[0]
rlidx = np.arange(istart, istart + racelinebuff + 1, step=1,
                  dtype=np.int64) % raceline.shape[0]
rlglobal = raceline[rlidx]
rld = np.hstack([
    np.zeros(1),
    np.cumsum(np.linalg.norm(rlglobal[1:] - rlglobal[:-1], ord=2, axis=1))
])
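# --- The circular-buffer indexing used above for the raceline lookahead, shown on a
# toy closed loop: find the raceline point nearest the car, back up a third of the
# buffer, then take a modular window of indices so the window wraps past the end of
# the lap. The circle, car position, and buffer size are all invented.
import numpy as np
from scipy.spatial import KDTree

theta = np.linspace(0.0, 2.0 * np.pi, 200, endpoint=False)
raceline = np.stack([np.cos(theta), np.sin(theta), np.zeros_like(theta)], axis=1)
tree = KDTree(raceline)

car_position = np.array([1.02, 0.05, 0.0])
buff = 60
_, iclosest = tree.query(car_position)
istart = (iclosest - buff // 3) % raceline.shape[0]
window = np.arange(istart, istart + buff + 1) % raceline.shape[0]  # wraps around
print(window[:5], window[-5:])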
class TimeSeriesPlotter:
    def __init__(self, ax, basemap, lons2d, lats2d, ncVarDict, times, start_date, end_date):
        """
        Plots time series at the point nearest to the clicked one
        :type ax: Axes
        """
        assert isinstance(ax, Axes)
        self.basemap = basemap
        assert isinstance(self.basemap, Basemap)

        self.lons_flat = lons2d.flatten()
        self.lats_flat = lats2d.flatten()
        self.ncVarDict = ncVarDict
        self.lons2d = lons2d
        self.lats2d = lats2d
        self.counter = 0
        self.ax = ax

        x, y, z = lat_lon.lon_lat_to_cartesian(lons2d.flatten(), lats2d.flatten())
        self.kdtree = KDTree(list(zip(x, y, z)))

        self.sel_time_indices = np.where([start_date <= t <= end_date for t in times])[0]
        self.times = times[self.sel_time_indices]

        ax.figure.canvas.mpl_connect("button_press_event", self)

    def _get_closest_ij(self, event):
        lon, lat = self.basemap(event.xdata, event.ydata, inverse=True)

        x0, y0, z0 = lat_lon.lon_lat_to_cartesian(lon, lat)
        dist, i = self.kdtree.query((x0, y0, z0))

        lon0, lat0 = self.lons_flat[i], self.lats_flat[i]

        ind = np.where((self.lons2d == lon0) & (self.lats2d == lat0))
        ix = ind[0][0]
        jy = ind[1][0]
        return ix, jy

    def _plot_timeseries(self, ax, ix, jy):
        fig_daily = plt.figure()
        ax_daily = plt.gca()

        fig_monthly = plt.figure()
        ax_monthly = plt.gca()

        for varName, ncVar in self.ncVarDict.items():
            sel_values = ncVar[self.sel_time_indices, 0, ix, jy]
            ax.plot(self.times, sel_values, label=varName)

            #calculate and plot daily means
            ts = pd.TimeSeries(index=self.times, data=sel_values)
            ts = ts.resample("D", how="mean")
            ax_daily.plot(ts.index, ts.values, label=varName)

            #calculate and plot monthly means
            ts = ts.resample("M", how="mean")
            ax_monthly.plot(ts.index, ts.values, label=varName)

        ax.legend()
        ax.set_title(str(self.counter))

        ax_daily.legend()
        ax_daily.set_title(str(self.counter) + " - daily")

        ax_monthly.legend()
        ax_monthly.set_title(str(self.counter) + " - monthly")

        assert isinstance(ax, Axes)

    def __call__(self, event):
        print(event.xdata, event.ydata)
        print(event.button)
        if event.button != 3:
            return
        ix, jy = self._get_closest_ij(event)

        fig = plt.figure()
        sounding_ax = fig.add_subplot(1, 1, 1)
        self._plot_timeseries(sounding_ax, ix, jy)

        self.ax.annotate(str(self.counter), (event.xdata, event.ydata),
                         font_properties=FontProperties(size=10))
        self.ax.redraw_in_frame()
        self.counter += 1

        assert isinstance(fig, Figure)
        plt.show()
def lazy_prm(start_conf, end_conf, sample_fn, extend_fn, collision_fn, num_samples=100, max_degree=10, weights=None, p_norm=2, max_distance=INF, approximate_eps=0.0, max_cost=INF, max_time=INF, max_paths=INF): # TODO: multi-query motion planning start_time = time.time() # TODO: can embed pose and/or points on the robot for other distances if weights is None: weights = np.ones(len(start_conf)) embed_fn = lambda q: weights * q distance_fn = lambda q1, q2: np.linalg.norm(embed_fn(q2) - embed_fn(q1), ord=p_norm) cost_fn = lambda v1, v2: distance_fn(samples[v1], samples[v2]) # TODO: can compute cost between waypoints from extend_fn samples = [] while len(samples) < num_samples: conf = sample_fn() if (distance_fn(start_conf, conf) + distance_fn(conf, end_conf)) < max_cost: samples.append(conf) start_index, end_index = 0, 1 samples[start_index] = start_conf samples[end_index] = end_conf embedded = list(map(embed_fn, samples)) kd_tree = KDTree(embedded) vertices = list(range(len(samples))) edges = set() for v1 in vertices: # TODO: could dynamically compute distances distances, neighbors = kd_tree.query(embedded[v1], k=max_degree + 1, eps=approximate_eps, p=p_norm, distance_upper_bound=max_distance) for d, v2 in zip(distances, neighbors): if (d < max_distance) and (v1 != v2): edges.update([(v1, v2), (v2, v1)]) neighbors_from_index = {v: set() for v in vertices} for v1, v2 in edges: neighbors_from_index[v1].add(v2) #print(time.time() - start_time, len(edges), float(len(edges))/len(samples)) colliding_vertices, colliding_edges = {}, {} def neighbors_fn(v1): for v2 in neighbors_from_index[v1]: if not (colliding_vertices.get(v2, False) or colliding_edges.get( (v1, v2), False)): yield v2 visited = dijkstra(end_index, neighbors_fn, cost_fn) heuristic_fn = lambda v: visited[v].g if v in visited else INF while elapsed_time(start_time) < max_time: # TODO: extra cost to prioritize reusing checked edges path = wastar_search(start_index, end_index, neighbors_fn=neighbors_fn, cost_fn=cost_fn, heuristic_fn=heuristic_fn, max_cost=max_cost, max_time=max_time - elapsed_time(start_time)) if path is None: return None, edges, colliding_vertices, colliding_edges cost = sum(cost_fn(v1, v2) for v1, v2 in zip(path, path[1:])) print( 'Length: {} | Cost: {:.3f} | Vertices: {} | Edges: {} | Time: {:.3f}' .format(len(path), cost, len(colliding_vertices), len(colliding_edges), elapsed_time(start_time))) if check_path(path, colliding_vertices, colliding_edges, samples, extend_fn, collision_fn): break solution = [start_conf] for q1, q2 in zip(path, path[1:]): solution.extend(extend_fn(samples[q1], samples[q2])) return solution, samples, edges, colliding_vertices, colliding_edges
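# --- Why lazy_prm's edge loop checks `d < max_distance` before accepting a
# neighbour: when distance_upper_bound is set, scipy.spatial.KDTree.query pads the
# missing neighbours with d == inf and index == n (one past the last point), so both
# must be filtered out. A small demonstration on random points; the 0.1 radius and
# the `j != 0` self-exclusion are illustrative only.
import numpy as np
from scipy.spatial import KDTree

pts = np.random.rand(20, 2)
tree = KDTree(pts)
dists, nbrs = tree.query(pts[0], k=5, distance_upper_bound=0.1)
for d, j in zip(dists, nbrs):
    if d < 0.1 and j != 0:    # drops the inf/n padding and the query point itself
        print(j, d)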
selectedTrain = selectedArray[np.where(train != 0)[0], :]
selectedTest = selectedArray[np.where(test != 0)[0], :]

#######################################################################################################################
# Spatial KD-tree method
t0 = time.time()
tree = KDTree(selectedTrain)
print(time.time() - t0)

# plain `int` instead of the deprecated (and since removed) np.int alias
found = np.zeros((selectedTest.shape[0], k), dtype=int)
# Klabels = np.zeros_like(found)
kNN = np.zeros((selectedTest.shape[0], 1), dtype=int)
for e in range(selectedTest.shape[0]):
    found[e, :] = tree.query(selectedTest[e, :], k=k)[1]
    # Klabels[e, :] = train_non_0[found[e, :]].ravel()
    kNN[e] = np.argmax(np.bincount(train_non_0[found[e, :]].ravel()))

print('from scratch knn method')
print(time.time() - t0)
print(accuracy_score(test_non_0, kNN.ravel()))
print(classification_report(test_non_0, kNN.ravel()))

#######################################################################################################################
# Scikit-Learn distance method (not really a separate method: it reuses the pairwise-distance utility directly)
t0 = time.time()
eDist = euclidean_distances(selectedTest, selectedTrain)
sorted_eDist = np.argsort(eDist, axis=1)
knn_a = np.reshape(train_non_0[sorted_eDist][:, :5, :],
def interpolate_to_amno(data_folder, start_year=1970, end_year=1999, rcm="", gcm="", out_folder=""):
    print("data_folder: {0}".format(data_folder))

    #check if the result file already exists
    sim_folder = os.path.join(out_folder, "{0}-{1}_{2}-{3}".format(gcm, rcm, start_year, end_year))
    #create a folder for each simulation
    if not os.path.isdir(sim_folder):
        os.mkdir(sim_folder)

    out_path = os.path.join(sim_folder, "narccap_runoff_{0}-{1}_{2}-{3}.nc".format(start_year, end_year, gcm, rcm))
    if os.path.isfile(out_path):
        print("{0} already exists, remove if you want to recreate.".format(out_path))
        return

    srof_pattern = os.path.join(data_folder, "mrros_*_*_*.nc")
    trof_pattern = os.path.join(data_folder, "mrro_*_*_*.nc")
    srof_ds = MFDataset(srof_pattern)
    trof_ds = MFDataset(trof_pattern)

    lon_in = srof_ds.variables["lon"][:]
    lat_in = srof_ds.variables["lat"][:]

    x_in, y_in, z_in = lat_lon.lon_lat_to_cartesian(lon_in.flatten(), lat_in.flatten())
    tree = KDTree(list(zip(x_in, y_in, z_in)))  # materialize: zip() is lazy in Python 3

    lon_out, lat_out = polar_stereographic.lons.flatten(), polar_stereographic.lats.flatten()
    x_out, y_out, z_out = lat_lon.lon_lat_to_cartesian(lon_out, lat_out)
    distances, indices = tree.query(list(zip(x_out, y_out, z_out)))

    time_var = srof_ds.variables["time"]
    time_in_units = time_var[:]
    times = num2date(time_in_units, time_var.units)

    time_indices = np.where(
        np.array([start_year <= x.year <= end_year for x in times], dtype=np.bool_))[0]

    srof_sub = srof_ds.variables["mrros"][time_indices, :, :]
    trof_sub = trof_ds.variables["mrro"][time_indices, :, :]
    times_sub = [x for x in times if start_year <= x.year <= end_year]  # itertools.ifilter is Python 2-only

    print("selected time window data")

    #writing result to netcdf
    out_nc = Dataset(out_path, "w")
    out_nc.createDimension("x", polar_stereographic.lons.shape[0])
    out_nc.createDimension("y", polar_stereographic.lats.shape[1])
    out_nc.createDimension("time")

    srof_var = out_nc.createVariable("mrros", "f4", dimensions=("time", "x", "y"))
    trof_var = out_nc.createVariable("mrro", "f4", dimensions=("time", "x", "y"))

    assert isinstance(srof_var, Variable)
    srof_in_var = srof_ds.variables["mrros"]
    for attr_name in srof_in_var.ncattrs():
        print(attr_name)
        srof_var.setncattr(attr_name, getattr(srof_in_var, attr_name))

    trof_in_var = trof_ds.variables["mrro"]
    for attr_name in trof_in_var.ncattrs():
        print(attr_name)
        trof_var.setncattr(attr_name, getattr(trof_in_var, attr_name))

    t_var = out_nc.createVariable("time", "f4", dimensions=("time",))
    lon_var = out_nc.createVariable("longitude", "f4", dimensions=("x", "y"))
    lat_var = out_nc.createVariable("latitude", "f4", dimensions=("x", "y"))

    t_var.units = time_var.units

    print("interpolating and saving data to netcdf file")
    nrows, ncols = polar_stereographic.lons.shape
    #interpolate in time if necessary
    n_interps = 0
    for i, t in enumerate(times_sub):
        sr_slice = srof_sub[i, :, :].flatten()
        tr_slice = trof_sub[i, :, :].flatten()

        trof1 = tr_slice[indices].reshape(nrows, ncols)
        srof1 = sr_slice[indices].reshape(nrows, ncols)

        if hasattr(trof1, "mask") and np.all(trof1.mask):
            trof1 = trof_var[i - 1, :, :]
            n_interps += 1
        if hasattr(srof1, "mask") and np.all(srof1.mask):
            srof1 = srof_var[i - 1, :, :]

        trof_var[i, :, :] = trof1
        srof_var[i, :, :] = srof1
        t_var[i] = date2num(t, time_var.units)

    print("Number of interpolations in time: {0}".format(n_interps))

    lon_var[:] = polar_stereographic.lons
    lat_var[:] = polar_stereographic.lats
    out_nc.close()
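# --- The core of interpolate_to_amno's time loop: the KDTree gives one flat source
# index per target cell, computed once, so each time slice is regridded by indexing
# into the flattened field and reshaping to the target grid. Grid sizes and the
# random field are placeholders.
import numpy as np
from scipy.spatial import KDTree

src = np.random.rand(30 * 40, 3)            # flattened source coords (xyz)
tgt = np.random.rand(50 * 60, 3)            # flattened target coords (xyz)
_, indices = KDTree(src).query(tgt)         # one nearest source cell per target cell

field_2d_src = np.random.rand(30, 40)       # one time slice on the source grid
field_2d_tgt = field_2d_src.flatten()[indices].reshape(50, 60)
print(field_2d_tgt.shape)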