def scale(args):
    """
    Draws the variable scale that is placed over the map.

    Returns a BytesIO object.
    """
    config = DatasetConfig(args.get("dataset"))

    scale = [float(part) for part in args.get("scale").split(",")]
    variable = args.get("variable").split(",")

    # Multi-component variables are keyed in the dataset config by the
    # comma-joined component names; a single component is keyed by itself
    # (",".join of a one-element list is that element).
    key = ",".join(variable)
    variable_unit = config.variable[key].unit
    variable_name = config.variable[key].name

    cmap = colormap.find_colormap(variable_name)
    if len(variable) == 2:
        # Two components: show the magnitude ("speed") colourmap instead.
        cmap = colormap.colormaps.get("speed")

    fig = plt.figure(figsize=(2, 5), dpi=75)
    ax = fig.add_axes([0.05, 0.05, 0.25, 0.9])
    norm = matplotlib.colors.Normalize(vmin=scale[0], vmax=scale[1])

    formatter = ScalarFormatter()
    formatter.set_powerlimits((-3, 4))

    bar = ColorbarBase(ax,
                       cmap=cmap,
                       norm=norm,
                       orientation="vertical",
                       format=formatter)
    if variable_name == "Potential Sub Surface Channel":
        bar.set_ticks([0, 1], True)
    bar.set_label("%s (%s)" % (variable_name.title(),
                               utils.mathtext(variable_unit)),
                  fontsize=12)
    # Increase tick font size
    bar.ax.tick_params(labelsize=12)

    buf = BytesIO()
    plt.savefig(buf,
                format="png",
                dpi="figure",
                transparent=False,
                bbox_inches="tight",
                pad_inches=0.05)
    plt.close(fig)

    buf.seek(0)  # rewind so callers can read the PNG from the start
    return buf
def scale(args):
    """
    Render the colour-scale (legend) image for a map variable.

    Returns a BytesIO containing the PNG.
    """
    dataset_name = args.get('dataset')
    config = DatasetConfig(dataset_name)
    scale = args.get('scale')
    scale = [float(component) for component in scale.split(',')]
    variable = args.get('variable')
    variable = variable.split(',')

    # Look up display name/unit. Multi-component variables are keyed by the
    # comma-joined component names; single components go through
    # dataset.variables first — presumably to map a raw variable key to the
    # config's key (TODO confirm against DatasetConfig/open_dataset).
    with open_dataset(config) as dataset:
        if len(variable) > 1:
            variable_unit = config.variable[",".join(variable)].unit
            variable_name = config.variable[",".join(variable)].name
        else:
            variable_unit = config.variable[dataset.variables[
                variable[0]]].unit
            variable_name = config.variable[dataset.variables[
                variable[0]]].name

    cmap = colormap.find_colormap(variable_name)
    if len(variable) == 2:
        # Two components: use the magnitude ("speed") colourmap instead.
        cmap = colormap.colormaps.get('speed')

    fig = plt.figure(figsize=(2, 5), dpi=75)
    ax = fig.add_axes([0.05, 0.05, 0.25, 0.9])
    norm = matplotlib.colors.Normalize(vmin=scale[0], vmax=scale[1])
    formatter = ScalarFormatter()
    formatter.set_powerlimits((-3, 4))
    bar = ColorbarBase(ax,
                       cmap=cmap,
                       norm=norm,
                       orientation='vertical',
                       format=formatter)
    bar.set_label("%s (%s)" % (variable_name.title(),
                               utils.mathtext(variable_unit)),
                  fontsize=12)
    # Increase tick font size
    bar.ax.tick_params(labelsize=12)

    buf = BytesIO()
    plt.savefig(buf,
                format='png',
                dpi='figure',
                transparent=False,
                bbox_inches='tight',
                pad_inches=0.05)
    plt.close(fig)

    buf.seek(0)  # Move buffer back to beginning
    return buf
def load_data(self):
    """
    Load path data for the main (left) map and, in compare mode, for the
    right map, storing results on self / self.compare.
    """

    def find_depth(depth, clip_length, dataset):
        """
        Calculates and returns the depth, depth-value, and depth unit from a
        given dataset

        Args:
            * depth: Stored depth information (self.depth or
              self.compare['depth'])
            * clip_length: How many depth values to clip (usually
              len(dataset.depths) - 1)
            * dataset: Opened dataset

        Returns:
            (depth, depth_value, depth_unit)
        """
        depth_value = 0
        depth_unit = "m"
        if depth:
            if depth == "bottom":
                depth_value = "Bottom"
                depth_unit = ""
                return (depth, depth_value, depth_unit)
            else:
                # Depth is an index; clamp it into range then look up the
                # actual depth in metres.
                depth = np.clip(int(depth), 0, clip_length)
                depth_value = np.round(dataset.depths[depth])
                depth_unit = "m"
                return (depth, depth_value, depth_unit)
        # Falsy depth (e.g. None or 0): fall through with the defaults above.
        return (depth, depth_value, depth_unit)

    # Load left/Main Map
    with open_dataset(
            self.dataset_config,
            timestamp=self.starttime,
            endtime=self.endtime,
            variable=self.variables,
    ) as dataset:
        self.depth, self.depth_value, self.depth_unit = find_depth(
            self.depth, len(dataset.depths) - 1, dataset)

        self.path_points, self.distance, times, data = dataset.get_path(
            self.points,
            self.depth,
            self.variables[0],
            self.starttime,
            self.endtime,
            tile_time=False,
        )

        self.variable_name = self.get_variable_names(
            dataset, self.variables)[0]
        variable_units = self.get_variable_units(dataset, self.variables)
        self.variable_unit = variable_units[0]
        # Transpose so depth/time axes match what plot() expects
        # (TODO confirm axis order against _hovmoller_plot).
        self.data = data.T
        self.iso_timestamps = times

    # Get colourmap
    if self.cmap is None:
        self.cmap = colormap.find_colormap(self.variable_name)

    # Load data sent from Right Map (if in compare mode)
    if self.compare:
        compare_config = DatasetConfig(self.compare["dataset"])
        with open_dataset(
                compare_config,
                timestamp=self.compare["starttime"],
                endtime=self.compare["endtime"],
                variable=self.compare["variables"],
        ) as dataset:
            (
                self.compare["depth"],
                self.compare["depth_value"],
                self.compare["depth_unit"],
            ) = find_depth(self.compare["depth"],
                           len(dataset.depths) - 1, dataset)

            path, distance, times, data = dataset.get_path(
                self.points,
                self.compare["depth"],
                self.compare["variables"][0],
                self.compare["starttime"],
                self.compare["endtime"],
                tile_time=False,
            )

            self.compare["variable_name"] = self.get_variable_names(
                dataset, self.compare["variables"])[0]

            # Colourmap: resolve "default" to the variable's own colormap
            if self.compare["colormap"] == "default":
                self.compare["colormap"] = colormap.find_colormap(
                    self.compare["variable_name"])
            else:
                self.compare["colormap"] = colormap.find_colormap(
                    self.compare["colormap"])

            variable_units = self.get_variable_units(
                dataset, self.compare["variables"])
            self.compare["variable_unit"] = variable_units[0]
            self.compare["data"] = data.T
            self.compare["times"] = times
def plot(self):
    """
    Render the Hovmöller figure: an optional path map, the main
    time/distance plot, and (in compare mode) the right-map plot plus a
    difference plot when both sides show the same variable.
    """

    def get_depth_label(depth_value, depth_unit):
        # Build the " at <depth>" suffix for subplot titles. load_data's
        # find_depth stores "Bottom" (and the raw request may be
        # "bottom"), so compare case-insensitively; bottom has no unit.
        if str(depth_value).lower() == "bottom":
            return " at Bottom"
        return " at %s %s" % (depth_value, depth_unit)

    # Figure size; stretch vertically when stacking compare subplots
    figuresize = list(map(float, self.size.split("x")))
    figuresize[1] *= 1.5 if self.compare else 1
    fig = plt.figure(figsize=figuresize, dpi=self.dpi)

    if self.showmap:
        width = 2  # 2 columns (map + data)
        width_ratios = [2, 7]
    else:
        width = 1  # 1 column
        width_ratios = [1]

    # Setup grid (rows, columns, column/row ratios) depending on view mode
    if self.compare:
        # Don't show a difference plot if variables are different
        if self.compare["variables"][0] == self.variables[0]:
            gs = gridspec.GridSpec(3,
                                   width,
                                   width_ratios=width_ratios,
                                   height_ratios=[1, 1, 1])
        else:
            gs = gridspec.GridSpec(2,
                                   width,
                                   width_ratios=width_ratios,
                                   height_ratios=[1, 1])
    else:
        gs = gridspec.GridSpec(1, width, width_ratios=width_ratios)

    if self.showmap:
        # Plot the path on a map
        utils.path_plot(self.path_points, gs[:, 0])

    # Calculate variable range for the left/main plot
    if self.scale:
        vmin = self.scale[0]
        vmax = self.scale[1]
    else:
        vmin, vmax = utils.normalize_scale(
            self.data, self.dataset_config.variable[self.variables[0]])
        if len(self.variables) > 1:
            # Multi-component (magnitude) data is non-negative
            vmin = 0

    # Render the left/main Hovmöller plot
    self._hovmoller_plot(
        gs,
        [0, 1],
        [0, 0],
        gettext(self.variable_name),
        vmin,
        vmax,
        self.data,
        self.iso_timestamps,
        self.cmap,
        self.variable_unit,
        gettext(self.variable_name) +
        gettext(get_depth_label(self.depth_value, self.depth_unit)),
    )

    # If in compare mode
    if self.compare:
        # Calculate variable range for the right-map plot
        if self.compare["scale"]:
            vmin = self.compare["scale"][0]
            vmax = self.compare["scale"][1]
        else:
            vmin = np.amin(self.compare["data"])
            vmax = np.amax(self.compare["data"])
            # Signed fields get a range symmetric about zero
            if np.any([
                    re.search(x, self.compare["variable_name"],
                              re.IGNORECASE)
                    for x in ["velocity", "surface height", "wind"]
            ]):
                vmin = min(vmin, -vmax)
                vmax = max(vmax, -vmin)
            if len(self.compare["variables"]) > 1:
                vmin = 0

        self._hovmoller_plot(
            gs,
            [1, 1],
            [1, 0],
            gettext(self.compare["variable_name"]),
            vmin,
            vmax,
            self.compare["data"],
            self.compare["times"],
            self.compare["colormap"],
            self.compare["variable_unit"],
            gettext(self.compare["variable_name"]) + gettext(
                # BUGFIX: label with the human-readable depth value,
                # not the raw depth index stored in compare["depth"].
                get_depth_label(self.compare["depth_value"],
                                self.compare["depth_unit"])),
        )

        # Difference plot (only when both sides show the same variable)
        if self.compare["variables"][0] == self.variables[0]:
            data_difference = self.data - self.compare["data"]
            vmin = np.amin(data_difference)
            vmax = np.amax(data_difference)
            self._hovmoller_plot(
                gs,
                [2, 1],
                [2, 0],
                gettext(self.compare["variable_name"]),
                vmin,
                vmax,
                data_difference,
                self.compare["times"],
                colormap.find_colormap("anomaly"),
                self.compare["variable_unit"],
                gettext(self.compare["variable_name"]) +
                gettext(" Difference") + gettext(
                    get_depth_label(self.compare["depth_value"],
                                    self.compare["depth_unit"])),
            )

    # Image title
    # BUGFIX: the branches were inverted — a user-supplied title was being
    # discarded and the default title shown only when a custom title
    # existed (cf. the correct pattern in TransectPlotter.plot).
    if self.plotTitle:
        fig.suptitle(self.plotTitle, fontsize=15)
    else:
        fig.suptitle(gettext("Hovm\xf6ller Diagram(s) for:\n%s") %
                     (self.name),
                     fontsize=15)

    # Subplot padding
    fig.tight_layout(pad=0, w_pad=4, h_pad=2)
    fig.subplots_adjust(top=0.9 if self.compare else 0.85)

    return super(HovmollerPlotter, self).plot(fig)
def load_data(self):
    """
    Load path data for the main (left) map and, in compare mode, for the
    right map; applies per-variable scale factors before storing results
    on self / self.compare.
    """

    def find_depth(depth, clip_length, dataset):
        """
        Calculates and returns the depth, depth-value, and depth unit from a
        given dataset

        Args:
            * depth: Stored depth information (self.depth or
              self.compare['depth'])
            * clip_length: How many depth values to clip (usually
              len(dataset.depths) - 1)
            * dataset: Opened dataset

        Returns:
            (depth, depth_value, depth_unit)
        """
        depth_value = 0
        depth_unit = "m"
        if depth:
            if depth == 'bottom':
                depth_value = 'Bottom'
                depth_unit = ''
                return (depth, depth_value, depth_unit)
            else:
                # Depth is an index; clamp into range, then look up the
                # actual depth in metres.
                depth = np.clip(int(depth), 0, clip_length)
                depth_value = np.round(dataset.depths[depth])
                depth_unit = "m"
                return (depth, depth_value, depth_unit)
        # Falsy depth (e.g. None or 0): fall through with the defaults.
        return (depth, depth_value, depth_unit)

    # Load left/Main Map
    with open_dataset(self.dataset_config,
                      timestamp=self.starttime,
                      endtime=self.endtime,
                      variable=self.variables) as dataset:
        self.depth, self.depth_value, self.depth_unit = find_depth(
            self.depth, len(dataset.depths) - 1, dataset)

        self.path_points, self.distance, times, data = dataset.get_path(
            self.points,
            self.depth,
            self.variables[0],
            self.starttime,
            self.endtime,
            tile_time=False)

        self.variable_name = self.get_variable_names(
            dataset, self.variables)[0]
        variable_units = self.get_variable_units(dataset, self.variables)
        scale_factors = self.get_variable_scale_factors(
            dataset, self.variables)
        self.variable_unit = variable_units[0]
        # Apply the variable's scale factor, then transpose for plotting.
        self.data = np.multiply(data, scale_factors[0]).T
        self.iso_timestamps = times

    # Get colourmap
    if self.cmap is None:
        self.cmap = colormap.find_colormap(self.variable_name)

    # Load data sent from Right Map (if in compare mode)
    if self.compare:
        compare_config = DatasetConfig(self.compare['dataset'])
        with open_dataset(compare_config,
                          timestamp=self.compare['starttime'],
                          endtime=self.compare['endtime'],
                          variable=self.compare['variables']) as dataset:
            self.compare['depth'], self.compare[
                'depth_value'], self.compare['depth_unit'] = find_depth(
                    self.compare['depth'],
                    len(dataset.depths) - 1, dataset)

            path, distance, times, data = dataset.get_path(
                self.points,
                self.compare['depth'],
                self.compare['variables'][0],
                self.compare['starttime'],
                self.compare['endtime'],
                tile_time=False)

            self.compare['variable_name'] = self.get_variable_names(
                dataset, self.compare['variables'])[0]

            # Colourmap: resolve 'default' to the variable's own colormap
            if (self.compare['colormap'] == 'default'):
                self.compare['colormap'] = colormap.find_colormap(
                    self.compare['variable_name'])
            else:
                self.compare['colormap'] = colormap.find_colormap(
                    self.compare['colormap'])

            variable_units = self.get_variable_units(
                dataset, self.compare['variables'])
            scale_factors = self.get_variable_scale_factors(
                dataset, self.compare['variables'])
            self.compare['variable_unit'] = variable_units[0]
            self.compare['data'] = np.multiply(data, scale_factors[0]).T
            self.compare['times'] = times
def plot(self):
    """
    Render the area/map plot (or, for the 'geotiff' filetype, export the
    data as a GeoTIFF and return its bytes instead of a figure).
    """
    # --- GeoTIFF export path: write a single-band Float64 raster ---
    if self.filetype == 'geotiff':
        f, fname = tempfile.mkstemp()
        os.close(f)

        driver = gdal.GetDriverByName('GTiff')
        outRaster = driver.Create(fname, self.latitude.shape[1],
                                  self.longitude.shape[0], 1,
                                  gdal.GDT_Float64)
        x = [self.longitude[0, 0], self.longitude[-1, -1]]
        y = [self.latitude[0, 0], self.latitude[-1, -1]]
        outRasterSRS = osr.SpatialReference()

        # Project the corner lat/lons into map coordinates for the
        # geotransform.
        x, y = self.basemap(x, y)
        outRasterSRS.ImportFromProj4(self.basemap.proj4string)

        pixelWidth = (x[-1] - x[0]) / self.longitude.shape[0]
        pixelHeight = (y[-1] - y[0]) / self.latitude.shape[0]
        outRaster.SetGeoTransform(
            (x[0], pixelWidth, 0, y[0], 0, pixelHeight))

        outband = outRaster.GetRasterBand(1)
        d = self.data.astype("Float64")
        ndv = d.fill_value  # masked-array fill value doubles as nodata
        outband.WriteArray(d.filled(ndv))
        outband.SetNoDataValue(ndv)
        outRaster.SetProjection(outRasterSRS.ExportToWkt())
        outband.FlushCache()
        outRaster = None  # closes the GDAL dataset, flushing to disk

        # NOTE(review): reads binary TIFF through a latin-1 text decode;
        # looks intentional (round-trippable byte<->str mapping) but
        # 'rb' would be the conventional choice — confirm downstream
        # consumers before changing.
        with open(fname, 'r', encoding="latin-1") as f:
            buf = f.read()
        os.remove(fname)

        return (buf, self.mime, self.filename.replace(".geotiff", ".tif"))

    # --- Figure rendering path ---
    # Figure size
    figuresize = list(map(float, self.size.split("x")))
    fig = plt.figure(figsize=figuresize, dpi=self.dpi)
    ax = plt.gca()

    # Variable colour range: user-specified or derived from the data
    if self.scale:
        vmin = self.scale[0]
        vmax = self.scale[1]
    else:
        vmin, vmax = utils.normalize_scale(
            self.data,
            self.dataset_config.variable[f"{self.variables[0]}"])

    c = self.basemap.imshow(self.data,
                            vmin=vmin,
                            vmax=vmax,
                            cmap=self.cmap)

    # Optional quiver (vector) overlay — two components present
    if len(self.quiver_data) == 2:
        qx, qy = self.quiver_data
        qx, qy, x, y = self.basemap.rotate_vector(qx,
                                                  qy,
                                                  self.quiver_longitude,
                                                  self.quiver_latitude,
                                                  returnxy=True)
        # Re-apply the original masks lost by rotate_vector
        qx = np.ma.masked_where(np.ma.getmask(self.quiver_data[0]), qx)
        qy = np.ma.masked_where(np.ma.getmask(self.quiver_data[1]), qy)

        if self.quiver['magnitude'] != 'length':
            # Normalize to unit vectors; magnitude is conveyed by colour
            # (or not at all), so use a fixed arrow scale.
            qx = qx / self.quiver_magnitude
            qy = qy / self.quiver_magnitude
            qscale = 50
        else:
            qscale = None

        if self.quiver['magnitude'] == 'color':
            if self.quiver['colormap'] is None or \
               self.quiver['colormap'] == 'default':
                qcmap = colormap.colormaps.get('speed')
            else:
                qcmap = colormap.colormaps.get(self.quiver['colormap'])
            q = self.basemap.quiver(
                x,
                y,
                qx,
                qy,
                self.quiver_magnitude,
                width=0.0035,
                headaxislength=4,
                headlength=4,
                scale=qscale,
                pivot='mid',
                cmap=qcmap,
            )
        else:
            q = self.basemap.quiver(
                x,
                y,
                qx,
                qy,
                width=0.0025,
                headaxislength=4,
                headlength=4,
                scale=qscale,
                pivot='mid',
            )

        if self.quiver['magnitude'] == 'length':
            # Arrow length encodes magnitude: add a key showing a
            # round-number reference length (~2x the mean magnitude).
            unit_length = np.mean(self.quiver_magnitude) * 2
            unit_length = np.round(
                unit_length, -int(np.floor(np.log10(unit_length))))
            if unit_length >= 1:
                unit_length = int(unit_length)

            plt.quiverkey(q,
                          .65,
                          .01,
                          unit_length,
                          self.quiver_name.title() + " " +
                          str(unit_length) + " " +
                          utils.mathtext(self.quiver_unit),
                          coordinates='figure',
                          labelpos='E')

    if self.show_bathymetry:
        # Plot bathymetry on top
        cs = self.basemap.contour(
            self.longitude,
            self.latitude,
            self.bathymetry,
            latlon=True,
            linewidths=0.5,
            norm=LogNorm(vmin=1, vmax=6000),
            cmap='Greys',
            levels=[100, 200, 500, 1000, 2000, 3000, 4000, 5000, 6000])
        plt.clabel(cs, fontsize='x-large', fmt='%1.0fm')

    # Outline the selected area(s): white underlay + black overlay
    if self.area and self.show_area:
        for a in self.area:
            polys = []
            for co in a['polygons'] + a['innerrings']:
                coords = np.array(co).transpose()
                mx, my = self.basemap(coords[1], coords[0])
                map_coords = list(zip(mx, my))
                polys.append(Polygon(map_coords))

            paths = []
            for poly in polys:
                paths.append(poly.get_path())
            path = concatenate_paths(paths)

            poly = PathPatch(path,
                             fill=None,
                             edgecolor='#ffffff',
                             linewidth=5)
            plt.gca().add_patch(poly)
            poly = PathPatch(path, fill=None, edgecolor='k', linewidth=2)
            plt.gca().add_patch(poly)

        # Label multiple areas at their centroids
        if self.names is not None and len(self.names) > 1:
            for idx, name in enumerate(self.names):
                x, y = self.basemap(self.centroids[idx].y,
                                    self.centroids[idx].x)
                plt.annotate(
                    xy=(x, y),
                    s=name,
                    ha='center',
                    va='center',
                    size=12,
                    # weight='bold'
                )

    # Optional contour overlay of a second variable
    if len(self.contour_data) > 0:
        if (self.contour_data[0].min() != self.contour_data[0].max()):
            cmin, cmax = utils.normalize_scale(
                self.contour_data[0],
                self.dataset_config.variable[self.contour['variable']])
            levels = None
            # Parse user-supplied levels (comma-separated floats);
            # fall back to an automatic linspace on failure.
            if self.contour.get('levels') is not None and \
               self.contour['levels'] != 'auto' and \
               self.contour['levels'] != '':
                try:
                    levels = list(
                        set([
                            float(xx)
                            for xx in self.contour['levels'].split(",")
                            if xx.strip()
                        ]))
                    levels.sort()
                except ValueError:
                    pass

            if levels is None:
                levels = np.linspace(cmin, cmax, 5)

            cmap = self.contour['colormap']
            if cmap is not None:
                cmap = colormap.colormaps.get(cmap)
            if cmap is None:
                cmap = colormap.find_colormap(self.contour_name)

            if not self.contour.get('hatch'):
                contours = self.basemap.contour(self.longitude,
                                                self.latitude,
                                                self.contour_data[0],
                                                latlon=True,
                                                linewidths=2,
                                                levels=levels,
                                                cmap=cmap)
            else:
                # Hatched bands: black level lines + hatch-filled regions
                hatches = [
                    '//', 'xx', '\\\\', '--', '||', '..', 'oo', '**'
                ]
                if len(levels) + 1 < len(hatches):
                    hatches = hatches[0:len(levels) + 2]
                self.basemap.contour(self.longitude,
                                     self.latitude,
                                     self.contour_data[0],
                                     latlon=True,
                                     linewidths=1,
                                     levels=levels,
                                     colors='k')
                contours = self.basemap.contourf(self.longitude,
                                                 self.latitude,
                                                 self.contour_data[0],
                                                 latlon=True,
                                                 colors=['none'],
                                                 levels=levels,
                                                 hatches=hatches,
                                                 vmin=cmin,
                                                 vmax=cmax,
                                                 extend='both')

            if self.contour['legend']:
                handles, l = contours.legend_elements()
                labels = []
                for i, lab in enumerate(l):
                    if self.contour.get('hatch'):
                        # Hatched bands are ranges; label them as
                        # inequalities (percent for 'fraction' units).
                        if self.contour_unit == 'fraction':
                            if i == 0:
                                labels.append(
                                    "$x \\leq {0: .0f}\\%$".format(
                                        levels[i] * 100))
                            elif i == len(levels):
                                labels.append("$x > {0: .0f}\\%$".format(
                                    levels[i - 1] * 100))
                            else:
                                labels.append(
                                    "${0:.0f}\\% < x \\leq {1:.0f}\\%$".
                                    format(levels[i - 1] * 100,
                                           levels[i] * 100))
                        else:
                            if i == 0:
                                labels.append("$x \\leq %.3g$" %
                                              levels[i])
                            elif i == len(levels):
                                labels.append("$x > %.3g$" %
                                              levels[i - 1])
                            else:
                                labels.append("$%.3g < x \\leq %.3g$" %
                                              (levels[i - 1], levels[i]))
                    else:
                        # Plain contour lines are single values
                        if self.contour_unit == 'fraction':
                            labels.append("{0:.0%}".format(levels[i]))
                        else:
                            labels.append(
                                "%.3g %s" %
                                (levels[i],
                                 utils.mathtext(self.contour_unit)))

                ax = plt.gca()
                if self.contour_unit != 'fraction' and not \
                   self.contour.get('hatch'):
                    contour_title = "%s (%s)" % (self.contour_name,
                                                 utils.mathtext(
                                                     self.contour_unit))
                else:
                    contour_title = self.contour_name

                # Reverse so legend order matches visual top-to-bottom
                leg = ax.legend(handles[::-1],
                                labels[::-1],
                                loc='lower left',
                                fontsize='medium',
                                frameon=True,
                                framealpha=0.75,
                                title=contour_title)
                leg.get_title().set_fontsize('medium')
                if not self.contour.get('hatch'):
                    for legobj in leg.legendHandles:
                        legobj.set_linewidth(3)

    # Map Info
    self.basemap.drawmapboundary(fill_color=(0.3, 0.3, 0.3), zorder=-1)
    self.basemap.drawcoastlines(linewidth=0.5)
    self.basemap.fillcontinents(color='grey', lake_color='dimgrey')

    def find_lines(values):
        # Choose graticule lines appropriate to the lat/lon span.
        if np.amax(values) - np.amin(values) < 1:
            return [values.mean()]
        elif np.amax(values) - np.amin(values) < 25:
            return np.round(
                np.arange(np.amin(values), np.amax(values),
                          round(np.amax(values) - np.amin(values)) / 5))
        else:
            return np.arange(round(np.amin(values), -1),
                             round(np.amax(values), -1), 5)

    parallels = find_lines(self.latitude)
    meridians = find_lines(self.longitude)
    self.basemap.drawparallels(parallels,
                               labels=[1, 0, 0, 0],
                               color=(0, 0, 0, 0.5))
    self.basemap.drawmeridians(meridians,
                               labels=[0, 0, 0, 1],
                               color=(0, 0, 0, 0.5),
                               latmax=85)

    # Title: default to "<areas> <variable> <depth>, <date>"
    title = self.plotTitle
    if self.plotTitle is None or self.plotTitle == "":
        area_title = "\n".join(wrap(", ".join(self.names), 60)) + "\n"
        title = "%s %s %s, %s" % (area_title,
                                  self.variable_name.title(),
                                  self.depth_label,
                                  self.date_formatter(self.timestamp))
    plt.title(title.strip())
    ax = plt.gca()

    # Main colourbar on the right
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)
    bar = plt.colorbar(c, cax=cax)
    bar.set_label("%s (%s)" % (self.variable_name.title(),
                               utils.mathtext(self.variable_unit)),
                  fontsize=14)

    # Second colourbar for colour-coded quiver magnitude
    if self.quiver is not None and \
       self.quiver['variable'] != '' and \
       self.quiver['variable'] != 'none' and \
       self.quiver['magnitude'] == 'color':
        bax = divider.append_axes("bottom", size="5%", pad=0.35)
        qbar = plt.colorbar(q, orientation='horizontal', cax=bax)
        qbar.set_label(self.quiver_name.title() + " " +
                       utils.mathtext(self.quiver_unit),
                       fontsize=14)

    fig.tight_layout(pad=3, w_pad=4)

    return super(MapPlotter, self).plot(fig)
def plot(projection, x, y, z, args):
    """
    Render one 256x256 map tile as a PNG.

    Args:
        projection: projection identifier used for tile coordinates
        x, y, z: slippy-map tile indices / zoom level
        args: request parameters (dataset, variable, depth, scale, time,
              interpolation settings)

    Returns a BytesIO containing the PNG.
    """
    lat, lon = get_latlon_coords(projection, x, y, z)
    if len(lat.shape) == 1:
        lat, lon = np.meshgrid(lat, lon)

    dataset_name = args.get('dataset')
    variable = args.get('variable')
    # "_anom" suffix requests an anomaly (value minus climatology) tile
    if variable.endswith('_anom'):
        variable = variable[0:-5]
        anom = True
    else:
        anom = False

    variable = variable.split(',')
    depth = args.get('depth')

    scale = args.get('scale')
    scale = [float(component) for component in scale.split(',')]

    data = []
    with open_dataset(get_dataset_url(dataset_name)) as dataset:
        if args.get('time') is None or (type(args.get('time')) == str and
                                        len(args.get('time')) == 0):
            time = -1
        else:
            time = int(args.get('time'))

        # Wrap the time index into [0, len(timestamps))
        t_len = len(dataset.timestamps)
        while time >= t_len:
            time -= t_len
        while time < 0:
            time += len(dataset.timestamps)
        timestamp = dataset.timestamps[time]

        for v in variable:
            data.append(
                dataset.get_area(np.array([lat, lon]), depth, time, v,
                                 args.get('interp'), args.get('radius'),
                                 args.get('neighbours')))

        variable_name = get_variable_name(dataset_name,
                                          dataset.variables[variable[0]])
        variable_unit = get_variable_unit(dataset_name,
                                          dataset.variables[variable[0]])
        scale_factor = get_variable_scale_factor(
            dataset_name, dataset.variables[variable[0]])

        if anom:
            cmap = colormap.colormaps['anomaly']
        else:
            cmap = colormap.find_colormap(variable_name)

        # Depth in metres, used below to mask land via bathymetry
        if depth != 'bottom':
            depthm = dataset.depths[depth]
        else:
            depthm = 0

        if scale_factor != 1.0:
            for idx, val in enumerate(data):
                data[idx] = np.multiply(val, scale_factor)

        # Normalize Kelvin-based units to Celsius for display
        if variable_unit.startswith("Kelvin"):
            variable_unit = "Celsius"
            for idx, val in enumerate(data):
                data[idx] = np.add(val, -273.15)

        if len(data) == 1:
            data = data[0]

        if len(data) == 2:
            # Two components: plot the vector magnitude
            data = np.sqrt(data[0]**2 + data[1]**2)
            if not anom:
                cmap = colormap.colormaps.get('speed')

    if anom:
        # Subtract the monthly climatology for the requested timestamp.
        # NOTE(review): `v` is the last entry of the variable loop above —
        # for multi-component variables this only uses one component;
        # confirm anomaly tiles are single-variable only.
        with open_dataset(get_dataset_climatology(dataset_name)) as dataset:
            a = dataset.get_area(np.array([lat, lon]), depth,
                                 timestamp.month - 1, v,
                                 args.get('interp'), args.get('radius'),
                                 args.get('neighbours'))
            data -= a

    data = data.transpose()
    xpx = x * 256
    ypx = y * 256

    # Mask out cells shallower than the requested depth (i.e. land)
    # using the ETOPO bathymetry tile.
    with Dataset(current_app.config['ETOPO_FILE'] % (projection, z),
                 'r') as dataset:
        bathymetry = dataset["z"][ypx:(ypx + 256), xpx:(xpx + 256)]
    bathymetry = gaussian_filter(bathymetry, 0.5)
    data[np.where(bathymetry > -depthm)] = np.ma.masked

    # Colour-map the data and encode as PNG
    sm = matplotlib.cm.ScalarMappable(matplotlib.colors.Normalize(
        vmin=scale[0], vmax=scale[1]),
                                      cmap=cmap)
    img = sm.to_rgba(np.ma.masked_invalid(np.squeeze(data)))
    im = Image.fromarray((img * 255.0).astype(np.uint8))
    buf = BytesIO()
    im.save(buf, format='PNG', optimize=True)
    return buf
def plot(self):
    """
    Render the transect figure: an optional path map plus one or more
    transect panels (velocity components, compare-mode left/right maps,
    and a difference panel when both sides show the same variable).
    """
    gs, fig, velocity = self.gridSetup()

    # Plot the transect on a map
    if self.showmap:
        plt.subplot(gs[0, 0])
        utils.path_plot(self.transect_data['points'])

    def do_plot(subplots, map_subplot, data, name, cmapLabel, vmin, vmax,
                units, cmap):
        """
        Draw one transect panel.

        Args:
            subplots: a GridSpec object (gs)
            map_subplot: [row, col] position in the grid (don't use
                consecutive rows, to allow for expanding figure height)
            data: data to be plotted
            name: subplot title
            cmapLabel: label for colourmap legend
            vmin: minimum value for the variable scale
            vmax: maximum value for the variable scale
            units: units for variable (PSU, Celsius, etc.)
            cmap: colormap for variable
        """
        plt.subplot(subplots[map_subplot[0], map_subplot[1]])
        divider = self._transect_plot(data, self.depth, name, vmin, vmax,
                                      cmapLabel, units, cmap)
        if self.surface:
            self._surface_plot(divider)

    def find_minmax(scale, data):
        """
        Finds and returns the correct min/max values for the variable scale

        Args:
            scale: scale for the left or right map
                (self.scale or self.compare['scale'])
            data: transect_data
        Returns:
            (min, max)
        """
        if scale:
            return (scale[0], scale[1])
        return (np.amin(data), np.amax(data))

    def velocity_plot():
        # Place the selected velocity-component panels, one per row.
        # Reads vmin/vmax from the enclosing scope (set before the call).
        row = 0
        col = 1 if self.showmap else 0

        if self.selected_velocity_plots[0] == 1:
            do_plot(
                gs, [row, col], self.transect_data['magnitude'],
                gettext("Magnitude") + gettext(" for ") +
                self.date_formatter(self.timestamp), gettext("Magnitude"),
                vmin, vmax, self.transect_data['unit'], self.cmap)
            row += 1
        if self.selected_velocity_plots[1] == 1:
            do_plot(
                gs, [row, col], self.transect_data['parallel'],
                self.transect_data['name'] + " (" + gettext("Parallel") +
                ")" + gettext(" for ") +
                self.date_formatter(self.timestamp), gettext("Parallel"),
                vmin, vmax, self.transect_data['unit'], self.cmap)
            row += 1
        if self.selected_velocity_plots[2] == 1:
            do_plot(
                gs, [row, col], self.transect_data['perpendicular'],
                self.transect_data['name'] + " (" +
                gettext("Perpendicular") + ")" + gettext(" for ") +
                self.date_formatter(self.timestamp),
                gettext("Perpendicular"), vmin, vmax,
                self.transect_data['unit'], self.cmap)

    # Plot Transects
    # If in compare mode
    if self.compare:
        # Velocity has 2 components
        if velocity:
            if self.scale:
                vmin = self.scale[0]
                vmax = self.scale[1]
            else:
                vmin = min(np.amin(self.transect_data['parallel']),
                           np.amin(self.transect_data['perpendicular']))
                # BUGFIX: was np.amin on the perpendicular component,
                # which could clip the top of the colour scale.
                vmax = max(np.amax(self.transect_data['parallel']),
                           np.amax(self.transect_data['perpendicular']))
                # Make the range symmetric about zero
                vmin = min(vmin, -vmax)
                vmax = max(vmax, -vmin)

            col = 1 if self.showmap else 0
            do_plot(
                gs, [0, col], self.transect_data['parallel'],
                self.transect_data['name'] + " (" + gettext("Parallel") +
                ")" + gettext(" for ") +
                self.date_formatter(self.timestamp), gettext("Parallel"),
                vmin, vmax, self.transect_data['unit'], self.cmap)
            col += 1
            do_plot(
                gs, [0, col], self.transect_data['perpendicular'],
                self.transect_data['name'] + " (" +
                gettext("Perpendicular") + ")" + gettext(" for ") +
                self.date_formatter(self.timestamp),
                gettext("Perpendicular"), vmin, vmax,
                self.transect_data['unit'], self.cmap)

            if len(self.compare['variables']) == 2:
                if self.compare['scale']:
                    vmin = self.compare['scale'][0]
                    vmax = self.compare['scale'][1]
                else:
                    vmin = min(np.amin(self.compare['parallel']),
                               np.amin(self.compare['perpendicular']))
                    # BUGFIX: same amin/amax mix-up as above.
                    vmax = max(np.amax(self.compare['parallel']),
                               np.amax(self.compare['perpendicular']))
                    vmin = min(vmin, -vmax)
                    vmax = max(vmax, -vmin)

                # Get colormap for variable
                cmap = colormap.find_colormap(self.compare['colormap'])
                col = 1 if self.showmap else 0
                do_plot(
                    gs, [1, col], self.compare['parallel'],
                    self.transect_data['name'] + " (" +
                    gettext("Parallel") + ")" + gettext(" for ") +
                    self.date_formatter(self.compare['date']),
                    gettext("Parallel"), vmin, vmax,
                    self.transect_data['unit'], cmap)
                col += 1
                do_plot(
                    gs, [1, col], self.compare['perpendicular'],
                    self.transect_data['name'] + " (" +
                    gettext("Perpendicular") + ")" + gettext(" for ") +
                    self.date_formatter(self.compare['date']),
                    gettext("Perpendicular"), vmin, vmax,
                    self.transect_data['unit'], cmap)
        else:
            vmin, vmax = utils.normalize_scale(
                self.transect_data['data'],
                self.dataset_config.variable[self.variables[0]])

            # Render primary/Left Map
            col = 1 if self.showmap else 0
            do_plot(
                gs, [0, col], self.transect_data['data'],
                self.transect_data['name'] + gettext(" for ") +
                self.date_formatter(self.timestamp),
                self.transect_data['name'], vmin, vmax,
                self.transect_data['unit'], self.cmap)

            # Render Right Map
            vmin, vmax = utils.normalize_scale(
                self.transect_data['compare_data'],
                self.compare_config.variable[",".join(
                    self.compare['variables'])])
            col = 1 if self.showmap else 0
            do_plot(
                gs, [1, col], self.transect_data['compare_data'],
                self.compare['name'] + gettext(" for ") +
                self.date_formatter(self.compare['date']),
                self.compare['name'], vmin, vmax, self.compare['unit'],
                self.compare['colormap'])

            # Show a difference plot if both variables and datasets are
            # the same
            if self.variables[0] == self.compare['variables'][0]:
                self.transect_data['difference'] = \
                    self.transect_data['data'] - \
                    self.transect_data['compare_data']

                # Calculate variable range
                if self.compare['scale_diff'] is not None:
                    vmin = self.compare['scale_diff'][0]
                    vmax = self.compare['scale_diff'][1]
                else:
                    vmin, vmax = find_minmax(
                        self.compare['scale_diff'],
                        self.transect_data['difference'])
                    vmin = min(vmin, -vmax)
                    vmax = max(vmax, -vmin)

                col = 1 if self.showmap else 0
                do_plot(
                    gs, [2, col], self.transect_data['difference'],
                    self.transect_data['name'] + gettext(" Difference"),
                    self.transect_data['name'], vmin, vmax,
                    # Since both variables are the same it doesn't matter
                    # which view's unit we reference
                    self.transect_data['unit'],
                    # Colormap for difference graphs
                    colormap.find_colormap(self.compare['colormap_diff']))
    # Not comparing
    else:
        # Velocity has 3 possible components
        if velocity:
            if self.scale:
                vmin = self.scale[0]
                vmax = self.scale[1]
            else:
                vmin = min(np.amin(self.transect_data['magnitude']),
                           np.amin(self.transect_data['parallel']),
                           np.amin(self.transect_data['perpendicular']))
                # BUGFIX: was np.amin on the perpendicular component.
                vmax = max(np.amax(self.transect_data['magnitude']),
                           np.amax(self.transect_data['parallel']),
                           np.amax(self.transect_data['perpendicular']))
                vmin = min(vmin, -vmax)
                vmax = max(vmax, -vmin)

            velocity_plot()
        # All other variables have 1 component
        else:
            col = 1 if self.showmap else 0

            if self.scale:
                vmin = self.scale[0]
                vmax = self.scale[1]
            else:
                vmin, vmax = utils.normalize_scale(
                    self.transect_data['data'],
                    self.dataset_config.variable[self.variables[0]])

            do_plot(
                gs, [0, col], self.transect_data['data'],
                self.transect_data['name'] + " for " +
                self.date_formatter(self.timestamp),
                self.transect_data['name'], vmin, vmax,
                self.transect_data['unit'], self.cmap)

    # Figure title
    if self.plotTitle is None or self.plotTitle == "":
        fig.suptitle("Transect Data for:\n%s" % (self.name), fontsize=15)
    else:
        fig.suptitle(self.plotTitle, fontsize=15)

    # Subplot padding
    fig.tight_layout(pad=2, w_pad=2, h_pad=2)
    fig.subplots_adjust(top=0.90 if self.compare else 0.85)

    return super(TransectPlotter, self).plot(fig)
def load_data(self):
    """Load all data needed to render the map plot (cartopy version).

    Chooses a plot projection from the requested EPSG code and map extent,
    builds a lat/lon interpolation grid, then reads the main variable,
    optional quiver (vector) variable, optional contour variable and
    bathymetry from the dataset.  Results are stored on ``self`` for the
    later rendering step; nothing is returned.
    """
    # Extra padding applied around the requested bounds.
    width_scale = 1.25
    height_scale = 1.25

    if self.projection == "EPSG:32661":  # north pole projection
        near_pole, covers_pole = self.pole_proximity(self.points[0])
        blat = min(self.bounds[0], self.bounds[2])
        blat = 5 * np.floor(blat / 5)

        # Use a stereographic projection when the view is centred near or
        # covers the pole; Lambert conformal otherwise.
        if self.centroid[0] > 80 or near_pole or covers_pole:
            self.plot_projection = ccrs.Stereographic(
                central_latitude=self.centroid[0],
                central_longitude=self.centroid[1],
            )
            width_scale = 1.5
        else:
            self.plot_projection = ccrs.LambertConformal(
                central_latitude=self.centroid[0],
                central_longitude=self.centroid[1],
            )
    elif self.projection == "EPSG:3031":  # south pole projection
        near_pole, covers_pole = self.pole_proximity(self.points[0])
        blat = max(self.bounds[0], self.bounds[2])
        blat = 5 * np.ceil(blat / 5)
        # is centered close to the south pole
        if ((self.centroid[0] < -80 or self.bounds[1] < -80
             or self.bounds[3] < -80) or covers_pole) or near_pole:
            self.plot_projection = ccrs.Stereographic(
                central_latitude=self.centroid[0],
                central_longitude=self.centroid[1],
            )
            width_scale = 1.5
        else:
            self.plot_projection = ccrs.LambertConformal(
                central_latitude=self.centroid[0],
                central_longitude=self.centroid[1],
            )
    elif abs(self.centroid[1] - self.bounds[1]) > 90:
        # Wide, mid-latitude request: use Mercator, but refuse anything
        # wider than the globe.
        if abs(self.bounds[3] - self.bounds[1]) > 360:
            raise ClientError(
                gettext(
                    "You have requested an area that exceeds the width \
of the world. Thinking big is good but plots need to \
be less than 360 deg wide."))
        self.plot_projection = ccrs.Mercator(
            central_longitude=self.centroid[1])
    else:
        self.plot_projection = ccrs.LambertConformal(
            central_latitude=self.centroid[0],
            central_longitude=self.centroid[1])

    # Project the bounding box corners to compute the plot width/height in
    # projection coordinates.
    proj_bounds = self.plot_projection.transform_points(
        self.pc_projection,
        np.array([self.bounds[1], self.bounds[3]]),
        np.array([self.bounds[0], self.bounds[2]]),
    )
    proj_size = np.diff(proj_bounds, axis=0)

    width = proj_size[0][0] * width_scale
    height = proj_size[0][1] * height_scale

    # Cap the interpolation grid at 500 cells along the longer axis.
    aspect_ratio = height / width
    if aspect_ratio < 1:
        gridx = 500
        gridy = int(500 * aspect_ratio)
    else:
        gridy = 500
        gridx = int(500 / aspect_ratio)

    self.plot_res = basemap.get_resolution(height, width)

    x_grid, y_grid, self.plot_extent = cimg_transform.mesh_projection(
        self.plot_projection,
        gridx,
        gridy,
        x_extents=(-width / 2, width / 2),
        y_extents=(-height / 2, height / 2),
    )

    latlon_grid = self.pc_projection.transform_points(
        self.plot_projection, x_grid, y_grid)

    self.longitude = latlon_grid[:, :, 0]
    self.latitude = latlon_grid[:, :, 1]

    # Copy so we don't mutate self.variables when appending the quiver var.
    variables_to_load = self.variables[:]
    if self.__load_quiver():
        variables_to_load.append(self.quiver["variable"])

    with open_dataset(self.dataset_config,
                      variable=variables_to_load,
                      timestamp=self.time) as dataset:

        self.variable_unit = self.get_variable_units(
            dataset, self.variables)[0]
        self.variable_name = self.get_variable_names(
            dataset, self.variables)[0]

        if self.cmap is None:
            self.cmap = colormap.find_colormap(self.variable_name)

        if self.depth == "bottom":
            depth_value_map = "Bottom"
        else:
            self.depth = np.clip(int(self.depth), 0,
                                 len(dataset.depths) - 1)
            depth_value = dataset.depths[self.depth]
            depth_value_map = depth_value

        data = []
        var = dataset.variables[self.variables[0]]
        # Tabular exports also need the true depth of each sample.
        if self.filetype in ["csv", "odv", "txt"]:
            d, depth_value_map = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                self.variables[0],
                self.interp,
                self.radius,
                self.neighbours,
                return_depth=True,
            )
        else:
            d = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                self.variables[0],
                self.interp,
                self.radius,
                self.neighbours,
            )
        data.append(d)

        if self.filetype not in ["csv", "odv", "txt"]:
            if len(var.dimensions) == 3:
                self.depth_label = ""
            elif self.depth == "bottom":
                self.depth_label = " at Bottom"
            else:
                self.depth_label = (" at " +
                                    str(int(np.round(depth_value_map))) +
                                    " m")

        self.data = data[0]

        quiver_data = []
        # Store the quiver data on the same grid as the main variable. This
        # will only be used for CSV export.
        quiver_data_fullgrid = []

        if self.__load_quiver():
            var = dataset.variables[self.quiver["variable"]]
            quiver_unit = self.dataset_config.variable[var].unit
            quiver_name = self.dataset_config.variable[var].name
            quiver_x_var = self.dataset_config.variable[
                var].east_vector_component
            quiver_y_var = self.dataset_config.variable[
                var].north_vector_component
            # Quiver arrows are drawn on a coarse 50x50 grid.
            quiver_x, quiver_y, _ = cimg_transform.mesh_projection(
                self.plot_projection,
                50,
                50,
                self.plot_extent[:2],
                self.plot_extent[2:],
            )
            quiver_coords = self.pc_projection.transform_points(
                self.plot_projection, quiver_x, quiver_y)
            quiver_lon = quiver_coords[:, :, 0]
            quiver_lat = quiver_coords[:, :, 1]

            x_vals = dataset.get_area(
                np.array([quiver_lat, quiver_lon]),
                self.depth,
                self.time,
                quiver_x_var,
                self.interp,
                self.radius,
                self.neighbours,
            )
            quiver_data.append(x_vals)
            y_vals = dataset.get_area(
                np.array([quiver_lat, quiver_lon]),
                self.depth,
                self.time,
                quiver_y_var,
                self.interp,
                self.radius,
                self.neighbours,
            )
            quiver_data.append(y_vals)

            mag_data = dataset.get_area(
                np.array([quiver_lat, quiver_lon]),
                self.depth,
                self.time,
                self.quiver["variable"],
                self.interp,
                self.radius,
                self.neighbours,
            )
            self.quiver_magnitude = mag_data

            # Get the quiver data on the same grid as the main
            # variable.
            x_vals = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                quiver_x_var,
                self.interp,
                self.radius,
                self.neighbours,
            )
            quiver_data_fullgrid.append(x_vals)
            y_vals = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                quiver_y_var,
                self.interp,
                self.radius,
                self.neighbours,
            )
            quiver_data_fullgrid.append(y_vals)

            self.quiver_name = self.get_variable_names(
                dataset, [self.quiver["variable"]])[0]
            self.quiver_longitude = quiver_lon
            self.quiver_latitude = quiver_lat
            self.quiver_unit = quiver_unit
        # Always set, so the masking loops below can iterate unconditionally.
        self.quiver_data = quiver_data
        self.quiver_data_fullgrid = quiver_data_fullgrid

        if all([
                dataset.variables[v].is_surface_only()
                for v in variables_to_load
        ]):
            self.depth = 0

        contour_data = []
        if (self.contour is not None and self.contour["variable"] != ""
                and self.contour["variable"] != "none"):
            d = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                self.contour["variable"],
                self.interp,
                self.radius,
                self.neighbours,
            )
            vc = self.dataset_config.variable[self.contour["variable"]]
            contour_unit = vc.unit
            contour_name = vc.name
            contour_data.append(d)

            self.contour_unit = contour_unit
            self.contour_name = contour_name
        self.contour_data = contour_data

        self.timestamp = dataset.nc_data.timestamp_to_iso_8601(self.time)

    # Compare mode: subtract the right-hand map's field from the left's.
    if self.compare:
        self.variable_name += " Difference"
        compare_config = DatasetConfig(self.compare["dataset"])
        with open_dataset(
                compare_config,
                variable=self.compare["variables"],
                timestamp=self.compare["time"],
        ) as dataset:
            data = []
            for v in self.compare["variables"]:
                var = dataset.variables[v]
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.compare["depth"],
                    self.compare["time"],
                    v,
                    self.interp,
                    self.radius,
                    self.neighbours,
                )
                data.append(d)
            data = data[0]

            self.data -= data

    # Load bathymetry data
    self.bathymetry = overlays.bathymetry(self.latitude,
                                          self.longitude,
                                          blur=2)

    if self.depth != "bottom" and self.depth != 0:
        # NOTE(review): quiver_bathymetry/depth_value are only bound when a
        # quiver variable was loaded / depth != "bottom"; presumably those
        # preconditions always hold here — confirm against callers.
        if quiver_data:
            quiver_bathymetry = overlays.bathymetry(quiver_lat, quiver_lon)

        # Mask out anything shallower than the selected depth level.
        self.data[np.where(
            self.bathymetry < depth_value_map)] = np.ma.masked
        for d in self.quiver_data:
            d[np.where(quiver_bathymetry < depth_value)] = np.ma.masked
        for d in self.contour_data:
            d[np.where(self.bathymetry < depth_value_map)] = np.ma.masked
    else:
        # Surface (or bottom) view: mask land using the ocean mask.
        mask = maskoceans(self.longitude, self.latitude, self.data, True,
                          "h", 1.25).mask
        self.data[~mask] = np.ma.masked
        for d in self.quiver_data:
            mask = maskoceans(self.quiver_longitude, self.quiver_latitude,
                              d).mask
            d[~mask] = np.ma.masked
        for d in contour_data:
            mask = maskoceans(self.longitude, self.latitude, d).mask
            d[~mask] = np.ma.masked

    # For tabular/geotiff export, mask everything outside the requested
    # polygon area(s).
    if self.area and self.filetype in ["csv", "odv", "txt", "geotiff"]:
        area_polys = []
        for a in self.area:
            rings = [LinearRing(p) for p in a["polygons"]]
            innerrings = [LinearRing(p) for p in a["innerrings"]]

            polygons = []
            for r in rings:
                inners = []
                for ir in innerrings:
                    if r.contains(ir):
                        inners.append(ir)
                polygons.append(Poly(r, inners))

            area_polys.append(MultiPolygon(polygons))

        points = [
            Point(p)
            for p in zip(self.latitude.ravel(), self.longitude.ravel())
        ]

        indicies = []
        for a in area_polys:
            indicies.append(
                np.where(
                    list(map(lambda p, poly=a: poly.contains(p),
                             points)))[0])
        indicies = np.unique(np.array(indicies).ravel())

        newmask = np.ones(self.data.shape, dtype=bool)
        newmask[np.unravel_index(indicies, newmask.shape)] = False
        self.data.mask |= newmask

    self.depth_value_map = depth_value_map
def load_data(self):
    """Load all data needed to render the map plot (legacy Basemap version).

    Computes the map extent with Vincenty distances, selects a Basemap
    projection from the requested EPSG code, then reads the main variable,
    optional quiver and contour variables, and bathymetry.  Results are
    stored on ``self``; nothing is returned.
    """
    distance = VincentyDistance()
    # Padded (x1.25) physical height/width of the request, in metres.
    height = distance.measure(
        (self.bounds[0], self.centroid[1]),
        (self.bounds[2], self.centroid[1])) * 1000 * 1.25
    width = distance.measure(
        (self.centroid[0], self.bounds[1]),
        (self.centroid[0], self.bounds[3])) * 1000 * 1.25

    if self.projection == 'EPSG:32661':  # north pole projection
        near_pole, covers_pole = self.pole_proximity(self.points[0])
        blat = min(self.bounds[0], self.bounds[2])
        blat = 5 * np.floor(blat / 5)

        # Polar stereographic near the pole, Lambert conformal otherwise.
        if self.centroid[0] > 80 or near_pole or covers_pole:
            self.basemap = basemap.load_map(
                'npstere', self.centroid, height, width,
                min(self.bounds[0], self.bounds[2]))
        else:
            self.basemap = basemap.load_map('lcc', self.centroid, height,
                                            width)
    elif self.projection == 'EPSG:3031':  # south pole projection
        near_pole, covers_pole = self.pole_proximity(self.points[0])
        blat = max(self.bounds[0], self.bounds[2])
        blat = 5 * np.ceil(blat / 5)
        if ((self.centroid[0] < -80 or self.bounds[1] < -80
             or self.bounds[3] < -80) or covers_pole
            ) or near_pole:  # is centered close to the south pole
            self.basemap = basemap.load_map(
                'spstere', self.centroid, height, width,
                max(self.bounds[0], self.bounds[2]))
        else:
            self.basemap = basemap.load_map('lcc', self.centroid, height,
                                            width)
    elif abs(self.centroid[1] - self.bounds[1]) > 90:
        # Very wide request: fall back to Mercator with 10% buffers, and
        # clamp so the final extent never exceeds the globe.
        height_bounds = [self.bounds[0], self.bounds[2]]
        width_bounds = [self.bounds[1], self.bounds[3]]
        height_buffer = (abs(height_bounds[1] - height_bounds[0])) * 0.1
        width_buffer = (abs(width_bounds[0] - width_bounds[1])) * 0.1

        if abs(width_bounds[1] - width_bounds[0]) > 360:
            raise ClientError(
                gettext(
                    "You have requested an area that exceeds the width of the world. \
Thinking big is good but plots need to be less than 360 deg wide."
                ))

        # NOTE(review): both arms of each if/else apply the same adjustment,
        # so the sign tests on height_bounds are redundant as written.
        if height_bounds[1] < 0:
            height_bounds[1] = height_bounds[1] + height_buffer
        else:
            height_bounds[1] = height_bounds[1] + height_buffer
        if height_bounds[0] < 0:
            height_bounds[0] = height_bounds[0] - height_buffer
        else:
            height_bounds[0] = height_bounds[0] - height_buffer

        new_width_bounds = []
        new_width_bounds.append(width_bounds[0] - width_buffer)
        new_width_bounds.append(width_bounds[1] + width_buffer)

        # If buffering pushed the width past 360 deg, shrink the buffer.
        if abs(new_width_bounds[1] - new_width_bounds[0]) > 360:
            width_buffer = np.floor(
                (360 - abs(width_bounds[1] - width_bounds[0])) / 2)
            new_width_bounds[0] = width_bounds[0] - width_buffer
            new_width_bounds[1] = width_bounds[1] + width_buffer

        if new_width_bounds[0] < -360:
            new_width_bounds[0] = -360
        if new_width_bounds[1] > 720:
            new_width_bounds[1] = 720

        self.basemap = basemap.load_map(
            'merc', self.centroid,
            (height_bounds[0], height_bounds[1]),
            (new_width_bounds[0], new_width_bounds[1]))
    else:
        self.basemap = basemap.load_map('lcc', self.centroid, height,
                                        width)

    # Cap the interpolation grid at 500 cells along the longer axis.
    if self.basemap.aspect < 1:
        gridx = 500
        gridy = int(500 * self.basemap.aspect)
    else:
        gridy = 500
        gridx = int(500 / self.basemap.aspect)

    self.longitude, self.latitude = self.basemap.makegrid(gridx, gridy)

    with open_dataset(get_dataset_url(self.dataset_name)) as dataset:
        # Normalize a negative/out-of-range time index into
        # [0, len(timestamps) - 1].
        if self.time < 0:
            self.time += len(dataset.timestamps)
        self.time = np.clip(self.time, 0, len(dataset.timestamps) - 1)

        self.variable_unit = self.get_variable_units(
            dataset, self.variables)[0]
        self.variable_name = self.get_variable_names(
            dataset, self.variables)[0]
        scale_factor = self.get_variable_scale_factors(
            dataset, self.variables)[0]

        if self.cmap is None:
            if len(self.variables) == 1:
                self.cmap = colormap.find_colormap(self.variable_name)
            else:
                # Two variables means a vector magnitude plot.
                self.cmap = colormap.colormaps.get('speed')

        if len(self.variables) == 2:
            self.variable_name = self.vector_name(self.variable_name)

        if self.depth == 'bottom':
            depth_value = 'Bottom'
        else:
            self.depth = np.clip(int(self.depth), 0,
                                 len(dataset.depths) - 1)
            depth_value = dataset.depths[self.depth]

        data = []
        allvars = []
        for v in self.variables:
            var = dataset.variables[v]
            allvars.append(v)
            # Tabular exports also need the true depth of each sample.
            if self.filetype in ['csv', 'odv', 'txt']:
                d, depth_value = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.depth,
                    self.time,
                    v,
                    self.interp,
                    self.radius,
                    self.neighbours,
                    return_depth=True)
            else:
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.depth, self.time, v, self.interp, self.radius,
                    self.neighbours)

            d = np.multiply(d, scale_factor)
            self.variable_unit, d = self.kelvin_to_celsius(
                self.variable_unit, d)
            data.append(d)

            if self.filetype not in ['csv', 'odv', 'txt']:
                if len(var.dimensions) == 3:
                    self.depth_label = ""
                elif self.depth == 'bottom':
                    self.depth_label = " at Bottom"
                else:
                    self.depth_label = " at " + \
                        str(int(np.round(depth_value))) + " m"

        # Two components -> vector magnitude.
        if len(data) == 2:
            data[0] = np.sqrt(data[0]**2 + data[1]**2)

        self.data = data[0]

        quiver_data = []
        # Store the quiver data on the same grid as the main variable. This
        # will only be used for CSV export.
        quiver_data_fullgrid = []

        if self.quiver is not None and \
                self.quiver['variable'] != '' and \
                self.quiver['variable'] != 'none':
            for v in self.quiver['variable'].split(','):
                allvars.append(v)
                var = dataset.variables[v]
                quiver_unit = get_variable_unit(self.dataset_name, var)
                quiver_name = get_variable_name(self.dataset_name, var)
                # Quiver arrows are drawn on a coarse 50x50 grid.
                quiver_lon, quiver_lat = self.basemap.makegrid(50, 50)
                d = dataset.get_area(
                    np.array([quiver_lat, quiver_lon]),
                    self.depth,
                    self.time,
                    v,
                    self.interp,
                    self.radius,
                    self.neighbours,
                )
                quiver_data.append(d)

                # Get the quiver data on the same grid as the main
                # variable.
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.depth,
                    self.time,
                    v,
                    self.interp,
                    self.radius,
                    self.neighbours,
                )
                quiver_data_fullgrid.append(d)

            self.quiver_name = self.vector_name(quiver_name)
            self.quiver_longitude = quiver_lon
            self.quiver_latitude = quiver_lat
            self.quiver_unit = quiver_unit
        # Always set, so the masking loops below can iterate unconditionally.
        self.quiver_data = quiver_data
        self.quiver_data_fullgrid = quiver_data_fullgrid

        # If every requested variable is surface-only (3 dims), force the
        # depth index to the surface.
        if all(
                [len(dataset.variables[v].dimensions) == 3
                 for v in allvars]):
            self.depth = 0

        contour_data = []
        if self.contour is not None and \
                self.contour['variable'] != '' and \
                self.contour['variable'] != 'none':
            d = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                self.contour['variable'],
                self.interp,
                self.radius,
                self.neighbours,
            )
            contour_unit = get_variable_unit(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_name = get_variable_name(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_factor = get_variable_scale_factor(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_unit, d = self.kelvin_to_celsius(contour_unit, d)
            d = np.multiply(d, contour_factor)
            contour_data.append(d)
            self.contour_unit = contour_unit
            self.contour_name = contour_name
        self.contour_data = contour_data

        self.timestamp = dataset.timestamps[self.time]

    # Compare mode: subtract the right-hand map's field from the left's.
    if self.compare:
        self.variable_name += " Difference"
        with open_dataset(get_dataset_url(
                self.compare['dataset'])) as dataset:
            data = []
            for v in self.compare['variables']:
                var = dataset.variables[v]
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.compare['depth'],
                    self.compare['time'],
                    v,
                    self.interp,
                    self.radius,
                    self.neighbours,
                )
                data.append(d)

            if len(data) == 2:
                data = np.sqrt(data[0]**2 + data[1]**2)
            else:
                data = data[0]

            u, data = self.kelvin_to_celsius(
                dataset.variables[self.compare['variables'][0]].unit,
                data)

            self.data -= data

    # Load bathymetry data
    self.bathymetry = overlays.bathymetry(self.basemap, self.latitude,
                                          self.longitude, blur=2)

    if self.depth != 'bottom' and self.depth != 0:
        if len(quiver_data) > 0:
            quiver_bathymetry = overlays.bathymetry(
                self.basemap, quiver_lat, quiver_lon)

        # Mask out anything shallower than the selected depth level.
        self.data[np.where(self.bathymetry < depth_value)] = np.ma.masked
        for d in self.quiver_data:
            d[np.where(quiver_bathymetry < depth_value)] = np.ma.masked
        for d in self.contour_data:
            d[np.where(self.bathymetry < depth_value)] = np.ma.masked
    else:
        # Surface (or bottom) view: mask land using the ocean mask.
        mask = maskoceans(self.longitude, self.latitude, self.data, True,
                          'h', 1.25).mask
        self.data[~mask] = np.ma.masked
        for d in self.quiver_data:
            mask = maskoceans(self.quiver_longitude,
                              self.quiver_latitude, d).mask
            d[~mask] = np.ma.masked
        for d in contour_data:
            mask = maskoceans(self.longitude, self.latitude, d).mask
            d[~mask] = np.ma.masked

    # For tabular/geotiff export, mask everything outside the requested
    # polygon area(s).
    if self.area and self.filetype in ['csv', 'odv', 'txt', 'geotiff']:
        area_polys = []
        for a in self.area:
            rings = [LinearRing(p) for p in a['polygons']]
            innerrings = [LinearRing(p) for p in a['innerrings']]

            polygons = []
            for r in rings:
                inners = []
                for ir in innerrings:
                    if r.contains(ir):
                        inners.append(ir)
                polygons.append(Poly(r, inners))

            area_polys.append(MultiPolygon(polygons))

        points = [
            Point(p)
            for p in zip(self.latitude.ravel(), self.longitude.ravel())
        ]

        indicies = []
        for a in area_polys:
            indicies.append(
                np.where(
                    list(map(lambda p, poly=a: poly.contains(p),
                             points)))[0])
        indicies = np.unique(np.array(indicies).ravel())

        newmask = np.ones(self.data.shape, dtype=bool)
        newmask[np.unravel_index(indicies, newmask.shape)] = False
        self.data.mask |= newmask

    self.depth_value = depth_value
def load_data(self):
    """Load observed platform-track data from the DB plus matching model data.

    Reads the platform's stations/samples for the requested track variables,
    builds along-track distances and times, then (if model variables were
    requested) extracts the model field along the same track for comparison.
    Results are stored on ``self``; nothing is returned.
    """
    platform = db.session.query(Platform).get(self.platform)
    self.name = platform.unique_id

    # First get the variable
    st0 = db.session.query(Station).filter(
        Station.platform == platform).first()
    datatype_keys = db.session.query(
        db.func.distinct(Sample.datatype_key)).filter(
            Sample.station == st0).all()
    datatypes = db.session.query(DataType).filter(
        DataType.key.in_(datatype_keys)).order_by(DataType.key).all()

    # self.trackvariables holds indices into the datatype list.
    variables = [datatypes[int(x)] for x in self.trackvariables]

    self.data_names = [dt.name for dt in variables]
    self.data_units = [dt.unit for dt in variables]
    self.track_cmaps = [
        colormap.find_colormap(dt.name) for dt in variables
    ]

    d = []
    for v in variables:
        d.append(
            get_platform_variable_track(
                db.session,
                platform,
                v.key,
                self.track_quantum,
                starttime=self.starttime,
                endtime=self.endtime,
            ))

    d = np.array(d)

    # Track rows are (time, lat, lon, depth, value); presumably all
    # variables share the same track, so positional columns come from d[0].
    self.points = d[0, :, 1:3].astype(float)
    add_tz_utc = np.vectorize(lambda x: x.replace(tzinfo=pytz.UTC))
    self.times = add_tz_utc(d[0, :, 0])
    self.data = d[:, :, 4].astype(float)
    self.depth = d[0, :, 3].astype(float)

    # Cumulative along-track distance (km) between consecutive points.
    d_delta = [
        distance(p0, p1).km
        for p0, p1 in zip(self.points[0:-1], self.points[1:])
    ]
    d_delta.insert(0, 0)
    self.distances = np.cumsum(d_delta)

    start = int(
        datetime_to_timestamp(self.times[0],
                              self.dataset_config.time_dim_units))
    end = int(
        datetime_to_timestamp(self.times[-1],
                              self.dataset_config.time_dim_units))

    # Simplify very long tracks down to 100 points for model extraction.
    points_simplified = self.points
    if len(self.points) > 100:
        points_simplified = np.array(vw.simplify(self.points, number=100))

    if len(self.variables) > 0:
        with open_dataset(self.dataset_config,
                          timestamp=start,
                          endtime=end,
                          variable=self.variables,
                          nearest_timestamp=True) as dataset:
            # Make distance -> time function
            dist_to_time = interp1d(
                self.distances,
                [time.mktime(t.timetuple()) for t in self.times],
                assume_sorted=True,
                bounds_error=False,
            )

            output_times = dist_to_time(
                np.linspace(0, self.distances[-1], 100))
            model_times = sorted([
                time.mktime(t.timetuple())
                for t in dataset.nc_data.timestamps
            ])

            self.model_depths = dataset.depths

            d = []
            depth = 0

            for v in self.variables:
                # Varying observed depth -> extract a full depth profile
                # along the track; constant depth -> a single path.
                if len(np.unique(self.depth)) > 1:
                    pts, dist, md, dep = dataset.get_path_profile(
                        points_simplified,
                        v,
                        int(
                            datetime_to_timestamp(
                                dataset.nc_data.timestamps[0],
                                self.dataset_config.time_dim_units)),
                        endtime=int(
                            datetime_to_timestamp(
                                dataset.nc_data.timestamps[-1],
                                self.dataset_config.time_dim_units)),
                    )

                    if len(model_times) > 1:
                        # Interpolate the model in time onto the track's
                        # observation times.
                        f = interp1d(
                            model_times,
                            md.filled(np.nan),
                            assume_sorted=True,
                            bounds_error=False,
                        )
                        ot = dist_to_time(dist)
                        od = f(ot).diagonal(0, 0, 2).copy()
                    else:
                        od = md

                    # Clear model data beneath observed data
                    od[np.where(self.model_depths > max(self.depth)
                                )[0][1:], :] = np.nan

                    d.append(od)
                    mt = [
                        int(
                            datetime_to_timestamp(
                                t, self.dataset_config.time_dim_units))
                        for t in dataset.nc_data.timestamps
                    ]
                    model_dist = dist
                else:
                    pts, dist, mt, md = dataset.get_path(
                        self.points,
                        depth,
                        v,
                        datetime_to_timestamp(
                            dataset.nc_data.timestamps[0],
                            self.dataset_config.time_dim_units),
                        endtime=datetime_to_timestamp(
                            dataset.nc_data.timestamps[-1],
                            self.dataset_config.time_dim_units),
                        times=output_times)
                    model_dist = dist

                    if len(model_times) > 1:
                        f = interp1d(
                            model_times,
                            md,
                            assume_sorted=True,
                            bounds_error=False,
                        )
                        d.append(np.diag(f(mt)))
                    else:
                        d.append(md)

            model_data = np.ma.array(d)

            variable_units = []
            variable_names = []
            scale_factors = []
            cmaps = []
            for v in self.variables:
                vc = self.dataset_config.variable[v]
                variable_units.append(vc.unit)
                variable_names.append(vc.name)
                scale_factors.append(vc.scale_factor)
                cmaps.append(colormap.find_colormap(vc.name))

            # Apply per-variable scale factors to the model data.
            for idx, sf in enumerate(scale_factors):
                model_data[idx, :] = np.multiply(model_data[idx, :], sf)

            self.model_data = model_data
            self.model_dist = model_dist
            self.model_times = list(
                map(datetime.datetime.utcfromtimestamp, model_times))
            self.variable_names = variable_names
            self.variable_units = variable_units
            self.cmaps = cmaps
def plot(projection, x, y, z, args):
    """Render a single map tile of a dataset variable as a PNG.

    Args:
        projection: EPSG projection string of the tile grid.
        x, y, z: Tile coordinates / zoom level.
        args: Request parameters (dataset, variable, depth, scale, time,
            interp, radius, neighbours).

    Returns:
        BytesIO containing the encoded PNG tile.
    """
    lat, lon = get_latlon_coords(projection, x, y, z)
    if len(lat.shape) == 1:
        lat, lon = np.meshgrid(lat, lon)

    dataset_name = args.get("dataset")
    config = DatasetConfig(dataset_name)
    variable = args.get("variable")
    variable = variable.split(",")
    depth = args.get("depth")
    scale = args.get("scale")
    scale = [float(component) for component in scale.split(",")]
    time = args.get("time")

    data = []
    with open_dataset(config, variable=variable, timestamp=time) as dataset:
        for v in variable:
            data.append(
                dataset.get_area(
                    np.array([lat, lon]),
                    depth,
                    time,
                    v,
                    args.get("interp"),
                    args.get("radius"),
                    args.get("neighbours"),
                ))

        vc = config.variable[dataset.variables[variable[0]]]
        variable_name = vc.name
        variable_unit = vc.unit
        cmap = colormap.find_colormap(variable_name)

        if depth != "bottom":
            depthm = dataset.depths[depth]
        else:
            depthm = 0

    # Two requested variables are vector components: combine into a
    # magnitude.  BUG FIX: previously this was two independent `if` tests
    # (`len(data) == 1` then `len(data) == 2`); after `data = data[0]` the
    # second test inspected the array's leading dimension, so a
    # single-variable grid whose first axis happened to be 2 would have
    # been wrongly combined.  Branch on the variable count instead.
    if len(data) == 2:
        data = np.sqrt(data[0]**2 + data[1]**2)
        cmap = colormap.colormaps.get("speed")
    else:
        data = data[0]

    data = data.transpose()
    xpx = x * 256
    ypx = y * 256

    # Mask out any topography if we're below the vector-tile threshold
    if z < 8:
        with Dataset(current_app.config["ETOPO_FILE"] % (projection, z),
                     "r") as dataset:
            bathymetry = dataset["z"][ypx:(ypx + 256), xpx:(xpx + 256)]

        bathymetry = gaussian_filter(bathymetry, 0.5)
        data[np.where(bathymetry > -depthm)] = np.ma.masked

    sm = matplotlib.cm.ScalarMappable(matplotlib.colors.Normalize(
        vmin=scale[0], vmax=scale[1]),
                                      cmap=cmap)

    img = sm.to_rgba(np.ma.masked_invalid(np.squeeze(data)))
    im = Image.fromarray((img * 255.0).astype(np.uint8))

    buf = BytesIO()
    im.save(buf, format="PNG", optimize=True)
    return buf
def load_data(self):
    """Load transect data for the primary map and (optionally) compare map.

    Extracts a vertical profile along ``self.points`` for the requested
    variable(s).  For a two-component (velocity) request the components are
    rotated into along-track (parallel) / cross-track (perpendicular)
    components.  Also loads an optional surface variable, compare-mode data,
    and bathymetry along the transect.  Results are stored on ``self``.

    Fixes relative to the previous revision:
      * The surface-variable scaling referenced an undefined name
        ``surface_factor`` (a NameError whenever a surface variable was
        requested); it now uses the config's ``scale_factor``.
      * In compare mode the y velocity component was fetched with
        ``variables[0]`` twice; the second fetch now uses ``variables[1]``,
        matching the primary-map branch.
    """
    vars_to_load = self.variables
    if self.surface:
        vars_to_load.append(self.surface)

    with open_dataset(self.dataset_config,
                      timestamp=self.time,
                      variable=vars_to_load) as dataset:

        # If a requested variable has no depth dimension, swap it for a
        # depth-aware variable from the same dataset so a transect can be
        # drawn.
        for idx, v in enumerate(self.variables):
            var = dataset.variables[v]
            if not (set(var.dimensions)
                    & set(dataset.nc_data.depth_dimensions)):
                for potential in dataset.variables:
                    if potential in self.variables:
                        continue
                    pot = dataset.variables[potential]
                    if set(pot.dimensions) & set(
                            dataset.nc_data.depth_dimensions):
                        if len(pot.dimensions) > 3:
                            self.variables[idx] = potential.key

        value = parallel = perpendicular = magnitude = None

        variable_names = self.get_variable_names(dataset, self.variables)
        variable_units = self.get_variable_units(dataset, self.variables)

        # Load data sent from primary/Left Map
        if len(self.variables) > 1:
            # Only velocity has 2 variables
            v = []
            for name in self.variables:
                v.append(dataset.variables[name])

            distances, times, lat, lon, bearings = geo.path_to_points(
                self.points, 100)

            # Calculate vector components
            transect_pts, distance, x, dep = dataset.get_path_profile(
                self.points, self.variables[0], self.time, numpoints=100)
            transect_pts, distance, y, dep = dataset.get_path_profile(
                self.points, self.variables[1], self.time, numpoints=100)

            # Rotate from east/north into along-track / cross-track.
            r = np.radians(np.subtract(90, bearings))
            theta = np.arctan2(y, x) - r

            magnitude = np.sqrt(x**2 + y**2)
            parallel = magnitude * np.cos(theta)
            perpendicular = magnitude * np.sin(theta)
        else:
            # Get data for one variable
            transect_pts, distance, value, dep = dataset.get_path_profile(
                self.points, self.variables[0], self.time)

        if len(self.variables) == 2:
            variable_names = [
                self.get_vector_variable_name(dataset, self.variables)
            ]
            variable_units = [
                self.get_vector_variable_unit(dataset, self.variables)
            ]

        # If a colourmap has not been manually specified by the
        # Navigator...
        if self.cmap is None:
            self.cmap = colormap.find_colormap(variable_names[0])

        self.iso_timestamp = dataset.nc_data.timestamp_to_iso_8601(
            self.time)

        self.depth = dep
        self.depth_unit = "m"

        self.transect_data = {
            "points": transect_pts,
            "distance": distance,
            "data": value,
            "name": variable_names[0],
            "unit": variable_units[0],
            "parallel": parallel,
            "perpendicular": perpendicular,
            "magnitude": magnitude,
        }

        if self.surface:
            surface_pts, surface_dist, _, surface_value = dataset.get_path(
                self.points, 0, self.surface, self.time)
            vc = self.dataset_config.variable[dataset.variables[
                self.surface]]
            surface_unit = vc.unit
            surface_name = vc.name
            # BUG FIX: previously multiplied by the undefined name
            # `surface_factor` (NameError); use the configured scale factor.
            surface_value = np.multiply(surface_value, vc.scale_factor)

            self.surface_data = {
                "config": vc,
                "points": surface_pts,
                "distance": surface_dist,
                "data": surface_value,
                "name": surface_name,
                "unit": surface_unit,
            }

    # Load data sent from Right Map (if in compare mode)
    if self.compare:

        def interpolate_depths(data, depth_in, depth_out):
            # Re-grid `data` from the compare dataset's depth axis onto
            # the primary dataset's depth axis, column by column.
            output = []
            for i in range(0, depth_in.shape[0]):
                f = interp1d(
                    depth_in[i],
                    data[:, i],
                    bounds_error=False,
                    assume_sorted=True,
                )
                output.append(
                    f(depth_out[i].view(np.ma.MaskedArray).filled()))

            return np.ma.masked_invalid(output).transpose()

        self.compare_config = DatasetConfig(self.compare["dataset"])
        self.compare["time"] = int(self.compare["time"])
        with open_dataset(
                self.compare_config,
                timestamp=self.compare["time"],
                variable=self.compare["variables"],
        ) as dataset:
            self.compare[
                "iso_timestamp"] = dataset.nc_data.timestamp_to_iso_8601(
                    self.compare["time"])

            # 1 variable
            if len(self.compare["variables"]) == 1:

                # Get and store the "nicely formatted" string for the
                # variable name
                self.compare["name"] = self.get_variable_names(
                    dataset, self.compare["variables"])[0]

                # Find correct colourmap
                if self.compare["colormap"] == "default":
                    self.compare["colormap"] = colormap.find_colormap(
                        self.compare["name"])
                else:
                    self.compare["colormap"] = colormap.find_colormap(
                        self.compare["colormap"])

                (
                    climate_points,
                    climate_distance,
                    climate_data,
                    cdep,
                ) = dataset.get_path_profile(self.points,
                                             self.compare["variables"][0],
                                             self.compare["time"])

                self.compare["unit"] = dataset.variables[
                    self.compare["variables"][0]].unit
                self.__fill_invalid_shift(climate_data)

                if (self.depth.shape != cdep.shape) or (self.depth !=
                                                        cdep).any():
                    # Need to interpolate the depths
                    climate_data = interpolate_depths(
                        climate_data, cdep, self.depth)

                if self.transect_data["data"] is None:
                    self.transect_data["magnitude"] -= climate_data
                    self.transect_data["parallel"] -= climate_data
                    self.transect_data["perpendicular"] -= climate_data
                else:
                    self.transect_data["compare_data"] = climate_data
            # Velocity variables
            else:
                # Get and store the "nicely formatted" string for the
                # variable name
                self.compare["name"] = self.get_vector_variable_name(
                    dataset, self.compare["variables"])

                (
                    climate_pts,
                    climate_distance,
                    climate_x,
                    cdep,
                ) = dataset.get_path_profile(
                    self.points,
                    self.compare["variables"][0],
                    self.compare["time"],
                    numpoints=100,
                )
                # BUG FIX: the y component previously re-fetched
                # variables[0]; it must use variables[1].
                (
                    climate_pts,
                    climate_distance,
                    climate_y,
                    cdep,
                ) = dataset.get_path_profile(
                    self.points,
                    self.compare["variables"][1],
                    self.compare["time"],
                    numpoints=100,
                )

                (
                    climate_distances,
                    ctimes,
                    clat,
                    clon,
                    bearings,
                ) = geo.path_to_points(self.points, 100)

                # Rotate into along-track / cross-track components.
                r = np.radians(np.subtract(90, bearings))
                theta = np.arctan2(climate_y, climate_x) - r
                mag = np.sqrt(climate_x**2 + climate_y**2)

                if np.all(self.depth != cdep):
                    theta = interpolate_depths(theta, cdep, self.depth)
                    self.__fill_invalid_shift(theta)
                    mag = interpolate_depths(mag, cdep, self.depth)
                    self.__fill_invalid_shift(mag)

                self.compare["parallel"] = mag * np.cos(theta)
                self.compare["perpendicular"] = mag * np.sin(theta)

    # Bathymetry
    with Dataset(current_app.config["BATHYMETRY_FILE"], "r") as dataset:
        bath_x, bath_y = bathymetry(
            dataset.variables["y"],
            dataset.variables["x"],
            dataset.variables["z"],
            self.points,
        )

    self.bathymetry = {"x": bath_x, "y": bath_y}
def load_data(self):
    """Load Hovmöller-style data for the primary map and optional compare map.

    Extracts the variable along ``self.points`` for every timestep in the
    requested range, applies scale factors, and stores the results on
    ``self`` (and in ``self.compare`` for compare mode).  Raises ClientError
    if the two maps' time ranges differ.
    """

    def find_depth(depth, clip_length, dataset):
        """
        Calculates and returns the depth, depth-value, and depth unit from a
        given dataset

        Args:
            depth: Stored depth information (self.depth or
                self.compare['depth'])
            clip_length: How many depth values to clip (usually
                len(dataset.depths) - 1)
            dataset: Opened dataset

        Returns:
            (depth, depth_value, depth_unit)
        """
        depth_value = 0
        depth_unit = "m"

        if depth:
            if depth == 'bottom':
                depth_value = 'Bottom'
                depth_unit = ''
                return (depth, depth_value, depth_unit)
            else:
                depth = np.clip(int(depth), 0, clip_length)
                depth_value = np.round(dataset.depths[depth])
                depth_unit = "m"
                return (depth, depth_value, depth_unit)

        # Falsy depth (e.g. 0 / None): defaults above.
        return (depth, depth_value, depth_unit)

    # Load left/Main Map
    with open_dataset(self.dataset_config) as dataset:
        latvar, lonvar = utils.get_latlon_vars(dataset)

        self.depth, self.depth_value, self.depth_unit = find_depth(
            self.depth, len(dataset.depths) - 1, dataset)

        self.fix_startend_times(dataset, self.starttime, self.endtime)

        time = list(range(self.starttime, self.endtime + 1))

        if len(self.variables) > 1:
            # Multiple variables are vector components: combine into a
            # magnitude.
            v = []
            for name in self.variables:
                self.path_points, self.distance, t, value = dataset.get_path(
                    self.points, self.depth, time, name)
                v.append(value**2)

            value = np.sqrt(np.ma.sum(v, axis=0))

            self.variable_name = self.get_vector_variable_name(
                dataset, self.variables)
        else:
            self.path_points, self.distance, t, value = dataset.get_path(
                self.points, self.depth, time, self.variables[0])
            self.variable_name = self.get_variable_names(
                dataset, self.variables)[0]

        variable_units = self.get_variable_units(dataset, self.variables)
        scale_factors = self.get_variable_scale_factors(
            dataset, self.variables)

        self.variable_unit = variable_units[0]
        self.data = value
        self.times = dataset.timestamps[self.starttime:self.endtime + 1]
        self.data = np.multiply(self.data, scale_factors[0])
        self.data = self.data.transpose()

        # Get colourmap
        if self.cmap is None:
            self.cmap = colormap.find_colormap(self.variable_name)

    # Load data sent from Right Map (if in compare mode)
    if self.compare:
        compare_config = DatasetConfig(self.compare['dataset'])
        with open_dataset(compare_config) as dataset:
            latvar, lonvar = utils.get_latlon_vars(dataset)

            self.compare['depth'], self.compare[
                'depth_value'], self.compare['depth_unit'] = find_depth(
                    self.compare['depth'],
                    len(dataset.depths) - 1, dataset)

            self.fix_startend_times(dataset, self.compare['starttime'],
                                    self.compare['endtime'])

            time = list(
                range(self.compare['starttime'],
                      self.compare['endtime'] + 1))

            if len(self.compare['variables']) > 1:
                # Vector components -> magnitude, as for the main map.
                v = []
                for name in self.compare['variables']:
                    path, distance, t, value = dataset.get_path(
                        self.points, self.compare['depth'], time, name)
                    v.append(value**2)

                value = np.sqrt(np.ma.sum(v, axis=0))

                self.compare['variable_name'] = \
                    self.get_vector_variable_name(dataset,
                                                  self.compare['variables'])
            else:
                path, distance, t, value = dataset.get_path(
                    self.points, self.compare['depth'], time,
                    self.compare['variables'][0])
                self.compare['variable_name'] = self.get_variable_names(
                    dataset, self.compare['variables'])[0]

            # Colourmap
            if (self.compare['colormap'] == 'default'):
                self.compare['colormap'] = colormap.find_colormap(
                    self.compare['variable_name'])
            else:
                self.compare['colormap'] = colormap.find_colormap(
                    self.compare['colormap'])

            variable_units = self.get_variable_units(
                dataset, self.compare['variables'])
            scale_factors = self.get_variable_scale_factors(
                dataset, self.compare['variables'])

            self.compare['variable_unit'] = variable_units[0]
            self.compare['data'] = value
            self.compare['times'] = dataset.timestamps[
                self.compare['starttime']:self.compare['endtime'] + 1]
            self.compare['data'] = np.multiply(self.compare['data'],
                                               scale_factors[0])
            self.compare['data'] = self.compare['data'].transpose()

        # Comparison over different time ranges makes no sense
        if self.starttime != self.compare['starttime'] or \
                self.endtime != self.compare['endtime']:
            raise ClientError(
                gettext(
                    "Please ensure the Start Time and End Time for the Left and Right maps are identical."
                ))
def plot(projection, x, y, z, args):
    """Render a single 256x256 slippy-map data tile as a PNG.

    Parameters:
        projection: projection code; used to resolve the tile's lat/lon grid
            and to pick the matching ETOPO bathymetry file.
        x, y, z: tile column, tile row, and zoom level.
        args: request-style mapping with keys 'dataset', 'variable', 'depth',
            'scale', 'time', 'interp', 'radius', 'neighbours'.

    Returns:
        BytesIO positioned at the start of the encoded PNG bytes.
    """
    lat, lon = get_latlon_coords(projection, x, y, z)
    if len(lat.shape) == 1:
        lat, lon = np.meshgrid(lat, lon)

    dataset_name = args.get('dataset')
    config = DatasetConfig(dataset_name)
    variable = args.get('variable').split(',')

    depth = args.get('depth')

    scale = [float(component) for component in args.get('scale').split(',')]

    data = []
    with open_dataset(config) as dataset:
        time = args.get('time')
        if time is None or (isinstance(time, str) and len(time) == 0):
            time = -1  # default: most recent timestamp
        else:
            time = int(time)

        # Normalize the index into [0, len(timestamps)).  Python's modulo
        # with a positive modulus maps negative and oversized indices
        # exactly as the previous subtract/add while-loops did.
        time %= len(dataset.timestamps)

        for v in variable:
            data.append(
                dataset.get_area(np.array([lat, lon]), depth, time, v,
                                 args.get('interp'), args.get('radius'),
                                 args.get('neighbours')))

        vc = config.variable[dataset.variables[variable[0]]]
        variable_name = vc.name
        variable_unit = vc.unit
        scale_factor = vc.scale_factor
        cmap = colormap.find_colormap(variable_name)

        # Depth used by the bathymetry mask below; 'bottom' is treated as 0
        # so no topography masking is applied for bottom plots.
        if depth != 'bottom':
            depthm = dataset.depths[depth]
        else:
            depthm = 0

        if scale_factor != 1.0:
            for idx, val in enumerate(data):
                data[idx] = np.multiply(val, scale_factor)

    if len(data) == 1:
        data = data[0]
    if len(data) == 2:
        # Two vector components: render the magnitude with the speed colormap.
        data = np.sqrt(data[0]**2 + data[1]**2)
        cmap = colormap.colormaps.get('speed')

    data = data.transpose()

    # Pixel offsets of this tile within the global ETOPO grid at zoom z.
    xpx = x * 256
    ypx = y * 256

    # Mask out any topography if we're below the vector-tile threshold
    if z < 8:
        with Dataset(current_app.config['ETOPO_FILE'] % (projection, z),
                     'r') as topo:
            bathymetry = topo["z"][ypx:(ypx + 256), xpx:(xpx + 256)]
        bathymetry = gaussian_filter(bathymetry, 0.5)
        # ETOPO depths are negative below sea level, hence the sign flip.
        data[np.where(bathymetry > -depthm)] = np.ma.masked

    sm = matplotlib.cm.ScalarMappable(
        matplotlib.colors.Normalize(vmin=scale[0], vmax=scale[1]), cmap=cmap)
    img = sm.to_rgba(np.ma.masked_invalid(np.squeeze(data)))
    im = Image.fromarray((img * 255.0).astype(np.uint8))

    buf = BytesIO()
    im.save(buf, format='PNG', optimize=True)
    return buf
def plot(self):
    """Render the area/map plot.

    Two output paths:
      * filetype == "geotiff": writes the raw data grid through GDAL to a
        temporary GeoTIFF and returns (bytes, mime, filename).
      * otherwise: builds the matplotlib figure (data raster, optional
        quivers, bathymetry contours, area outlines, contour overlay,
        colorbars) and delegates final rendering to the superclass.
    """
    if self.filetype == "geotiff":
        # --- GeoTIFF export path (no matplotlib involved) ---
        f, fname = tempfile.mkstemp()
        os.close(f)  # only the path is needed; GDAL opens it itself

        driver = gdal.GetDriverByName("GTiff")
        outRaster = driver.Create(
            fname,
            self.latitude.shape[1],
            self.longitude.shape[0],
            1,
            gdal.GDT_Float64,
        )
        # Corner coordinates of the grid, transformed into the plot
        # projection to build the geotransform.
        x = np.array([self.longitude[0, 0], self.longitude[-1, -1]])
        y = np.array([self.latitude[0, 0], self.latitude[-1, -1]])
        outRasterSRS = osr.SpatialReference()

        pts = self.plot_projection.transform_points(
            self.pc_projection, x, y)
        x = pts[:, 0]
        y = pts[:, 1]
        outRasterSRS.ImportFromProj4(self.plot_projection.proj4_init)

        pixelWidth = (x[-1] - x[0]) / self.longitude.shape[0]
        pixelHeight = (y[-1] - y[0]) / self.latitude.shape[0]
        outRaster.SetGeoTransform(
            (x[0], pixelWidth, 0, y[0], 0, pixelHeight))

        outband = outRaster.GetRasterBand(1)
        d = self.data.astype(np.float64)
        ndv = d.fill_value  # masked-array fill value doubles as NoData
        outband.WriteArray(d.filled(ndv))
        outband.SetNoDataValue(ndv)
        outRaster.SetProjection(outRasterSRS.ExportToWkt())
        outband.FlushCache()
        outRaster = None  # dropping the reference closes the GDAL dataset

        # NOTE(review): reading binary raster output via a latin-1 text
        # stream round-trips bytes 0-255 but yields a str, not bytes —
        # presumably what downstream expects; confirm against callers.
        with open(fname, "r", encoding="latin-1") as f:
            buf = f.read()
        os.remove(fname)

        return (buf, self.mime, self.filename.replace(".geotiff", ".tif"))

    # Figure size
    figuresize = list(map(float, self.size.split("x")))
    fig, map_plot = basemap.load_map(
        self.plot_projection,
        self.plot_extent,
        figuresize,
        self.dpi,
        self.plot_res,
    )

    ax = plt.gca()  # NOTE(review): unused below

    # Colour scale: explicit user scale, else normalized from the data.
    if self.scale:
        vmin = self.scale[0]
        vmax = self.scale[1]
    else:
        vmin, vmax = utils.normalize_scale(
            self.data,
            self.dataset_config.variable[f"{self.variables[0]}"])

    # Main data raster.
    c = map_plot.imshow(
        self.data,
        vmin=vmin,
        vmax=vmax,
        cmap=self.cmap,
        extent=self.plot_extent,
        transform=self.plot_projection,
        origin="lower",
        zorder=0,
    )

    # Quiver (vector) overlay — present only when two components loaded.
    if len(self.quiver_data) == 2:
        qx, qy = self.quiver_data
        qx, qy = self.plot_projection.transform_vectors(
            self.pc_projection, self.quiver_longitude,
            self.quiver_latitude, qx, qy)
        pts = self.plot_projection.transform_points(
            self.pc_projection, self.quiver_longitude,
            self.quiver_latitude)
        x = pts[:, :, 0]
        y = pts[:, :, 1]

        # Re-apply the source masks lost by transform_vectors.
        qx = np.ma.masked_where(np.ma.getmask(self.quiver_data[0]), qx)
        qy = np.ma.masked_where(np.ma.getmask(self.quiver_data[1]), qy)

        if self.quiver["magnitude"] != "length":
            # Normalize to unit arrows; magnitude is shown by colour or
            # not at all.
            qx = qx / self.quiver_magnitude
            qy = qy / self.quiver_magnitude
            qscale = 50
        else:
            qscale = None

        if self.quiver["magnitude"] == "color":
            if (self.quiver["colormap"] is None
                    or self.quiver["colormap"] == "default"):
                qcmap = colormap.colormaps.get("speed")
            else:
                qcmap = colormap.colormaps.get(self.quiver["colormap"])

            q = map_plot.quiver(
                x,
                y,
                qx,
                qy,
                self.quiver_magnitude,
                width=0.0035,
                headaxislength=4,
                headlength=4,
                scale=qscale,
                pivot="mid",
                cmap=qcmap,
                transform=self.plot_projection,
            )
        else:
            q = map_plot.quiver(
                x,
                y,
                qx,
                qy,
                width=0.0025,
                headaxislength=4,
                headlength=4,
                scale=qscale,
                pivot="mid",
                transform=self.plot_projection,
                zorder=6,
            )

        if self.quiver["magnitude"] == "length":
            # Reference arrow: ~2x the mean magnitude, rounded to one
            # significant figure.
            unit_length = np.mean(self.quiver_magnitude) * 2
            unit_length = np.round(
                unit_length, -int(np.floor(np.log10(unit_length))))
            if unit_length >= 1:
                unit_length = int(unit_length)

            plt.quiverkey(
                q,
                0.65,
                0.01,
                unit_length,
                self.quiver_name.title() + " " + str(unit_length) + " " +
                utils.mathtext(self.quiver_unit),
                coordinates="figure",
                labelpos="E",
            )

    if self.show_bathymetry:
        # Plot bathymetry on top
        cs = map_plot.contour(
            self.longitude,
            self.latitude,
            self.bathymetry,
            linewidths=0.5,
            norm=FuncNorm((lambda x: np.log10(x), lambda x: 10**x),
                          vmin=1,
                          vmax=6000),
            cmap="Greys",
            levels=[100, 200, 500, 1000, 2000, 3000, 4000, 5000, 6000],
            transform=self.pc_projection,
            zorder=4,
        )
        plt.clabel(cs, fontsize="x-large", fmt="%1.0fm")

    if self.area and self.show_area:
        # Outline each requested area (outer polygons + inner rings),
        # drawn twice (white under black) for contrast on any background.
        for a in self.area:
            polys = []
            for co in a["polygons"] + a["innerrings"]:
                coords = np.array(co).transpose()
                coords_transform = self.plot_projection.transform_points(
                    self.pc_projection, coords[1], coords[0])
                mx = coords_transform[:, 0]
                my = coords_transform[:, 1]
                map_coords = list(zip(mx, my))
                polys.append(Polygon(map_coords))

            paths = []
            for poly in polys:
                paths.append(poly.get_path())
            path = Path.make_compound_path(*paths)

            for ec, lw in zip(["w", "k"], [5, 3]):
                poly = PathPatch(
                    path,
                    fill=None,
                    edgecolor=ec,
                    linewidth=lw,
                    transform=self.plot_projection,
                    zorder=3,
                )
                map_plot.add_patch(poly)

        # Label area centroids when several named areas are shown.
        if self.names is not None and len(self.names) > 1:
            for idx, name in enumerate(self.names):
                pts = self.plot_projection.transform_points(
                    self.pc_projection, self.centroids[idx].x,
                    self.centroids[idx].y)
                x = pts[:, 0]
                y = pts[:, 1]
                plt.annotate(
                    xy=(x, y),
                    s=name,
                    ha="center",
                    va="center",
                    size=12,
                    # weight='bold'
                )

    # Optional contour overlay of a second variable.
    if len(self.contour_data) > 0:
        # Skip entirely-constant fields: contouring them is meaningless.
        if self.contour_data[0].min() != self.contour_data[0].max():
            cmin, cmax = utils.normalize_scale(
                self.contour_data[0],
                self.dataset_config.variable[self.contour["variable"]],
            )
            levels = None
            if (self.contour.get("levels") is not None
                    and self.contour["levels"] != "auto"
                    and self.contour["levels"] != ""):
                try:
                    levels = list(
                        set([
                            float(xx)
                            for xx in self.contour["levels"].split(",")
                            if xx.strip()
                        ]))
                    levels.sort()
                except ValueError:
                    # Unparseable user levels fall back to auto below.
                    pass

            if levels is None:
                levels = np.linspace(cmin, cmax, 5)

            cmap = self.contour["colormap"]
            if cmap is not None:
                cmap = colormap.colormaps.get(cmap)
            if cmap is None:
                cmap = colormap.find_colormap(self.contour_name)

            if not self.contour.get("hatch"):
                contours = map_plot.contour(
                    self.longitude,
                    self.latitude,
                    self.contour_data[0],
                    linewidths=2,
                    levels=levels,
                    cmap=cmap,
                    transform=self.pc_projection,
                    zorder=5,
                )
            else:
                hatches = [
                    "//", "xx", "\\\\", "--", "||", "..", "oo", "**"
                ]
                if len(levels) + 1 < len(hatches):
                    hatches = hatches[0:len(levels) + 2]
                # Black level lines plus hatched fills between them.
                map_plot.contour(
                    self.longitude,
                    self.latitude,
                    self.contour_data[0],
                    linewidths=1,
                    levels=levels,
                    colors="k",
                    transform=self.pc_projection,
                    zorder=5,
                )
                contours = map_plot.contourf(
                    self.longitude,
                    self.latitude,
                    self.contour_data[0],
                    colors=["none"],
                    levels=levels,
                    hatches=hatches,
                    vmin=cmin,
                    vmax=cmax,
                    extend="both",
                    transform=self.pc_projection,
                    zorder=5,
                )

            if self.contour["legend"]:
                handles, l = contours.legend_elements()
                labels = []
                # Build legend labels: range-style for hatched fills,
                # single-value for contour lines; percentages when the
                # unit is a fraction.
                for i, lab in enumerate(l):
                    if self.contour.get("hatch"):
                        if self.contour_unit == "fraction":
                            if i == 0:
                                labels.append(
                                    "$x \\leq {0: .0f}\\%$".format(
                                        levels[i] * 100))
                            elif i == len(levels):
                                labels.append("$x > {0: .0f}\\%$".format(
                                    levels[i - 1] * 100))
                            else:
                                labels.append(
                                    "${0:.0f}\\% < x \\leq {1:.0f}\\%$".
                                    format(levels[i - 1] * 100,
                                           levels[i] * 100))
                        else:
                            if i == 0:
                                labels.append("$x \\leq %.3g$" %
                                              levels[i])
                            elif i == len(levels):
                                labels.append("$x > %.3g$" %
                                              levels[i - 1])
                            else:
                                labels.append("$%.3g < x \\leq %.3g$" %
                                              (levels[i - 1], levels[i]))
                    else:
                        if self.contour_unit == "fraction":
                            labels.append("{0:.0%}".format(levels[i]))
                        else:
                            labels.append(
                                "%.3g %s" %
                                (levels[i],
                                 utils.mathtext(self.contour_unit)))

                ax = plt.gca()
                if self.contour_unit != "fraction" and not self.contour.get(
                        "hatch"):
                    contour_title = "%s (%s)" % (
                        self.contour_name,
                        utils.mathtext(self.contour_unit),
                    )
                else:
                    contour_title = self.contour_name

                leg = ax.legend(
                    handles[::-1],
                    labels[::-1],
                    loc="lower left",
                    fontsize="medium",
                    frameon=True,
                    framealpha=0.75,
                    title=contour_title,
                )
                leg.get_title().set_fontsize("medium")

                if not self.contour.get("hatch"):
                    for legobj in leg.legendHandles:
                        legobj.set_linewidth(3)

    # Title: user-supplied, or auto-generated from area/variable/depth/date.
    title = self.plotTitle
    if self.plotTitle is None or self.plotTitle == "":
        area_title = "\n".join(wrap(", ".join(self.names), 60)) + "\n"
        title = "%s %s %s, %s" % (
            area_title,
            self.variable_name.title(),
            self.depth_label,
            self.date_formatter(self.timestamp),
        )
    plt.title(title.strip())

    # Main colorbar, placed just right of the map axes.
    axpos = map_plot.get_position()
    pos_x = axpos.x0 + axpos.width + 0.01
    pos_y = axpos.y0
    cax = fig.add_axes([pos_x, pos_y, 0.03, axpos.height])
    bar = plt.colorbar(c, cax=cax)
    bar.set_label(
        "%s (%s)" % (self.variable_name.title(),
                     utils.mathtext(self.variable_unit)),
        fontsize=14,
    )

    # Secondary horizontal colorbar for colour-coded quiver magnitude.
    if (self.quiver is not None and self.quiver["variable"] != ""
            and self.quiver["variable"] != "none"
            and self.quiver["magnitude"] == "color"):
        pos_x = axpos.x0
        pos_y = axpos.y0 - 0.05
        bax = fig.add_axes([pos_x, pos_y, axpos.width, 0.03])
        qbar = plt.colorbar(q, orientation="horizontal", cax=bax)
        qbar.set_label(
            self.quiver_name.title() + " " +
            utils.mathtext(self.quiver_unit),
            fontsize=14,
        )

    return super(MapPlotter, self).plot(fig)
def plot(self):
    """Render the timeseries plot.

    Two layouts:
      * self.depth == 'all': one pcolormesh (time x depth) panel per point,
        each with its own colorbar.
      * otherwise: a single line plot of the variable over time, with an
        optional location map alongside.
    Delegates final rendering to the superclass.
    """
    # Colour/value scale: explicit user scale, else derived from the data.
    if self.scale:
        vmin = self.scale[0]
        vmax = self.scale[1]
    else:
        vmin, vmax = utils.normalize_scale(
            self.data, self.dataset_config.variable[self.variables[0]])

    if self.cmap is None:
        self.cmap = colormap.find_colormap(self.variable_name)

    datenum = matplotlib.dates.date2num(self.times)
    if self.depth == 'all':
        size = list(map(float, self.size.split("x")))
        numpoints = len(self.points)
        # Stack one panel per point vertically.
        figuresize = (size[0], size[1] * numpoints)
        fig, ax = plt.subplots(numpoints,
                               1,
                               sharex=True,
                               figsize=figuresize,
                               dpi=self.dpi)
        # A single subplot is returned bare, not in an array.
        if not isinstance(ax, np.ndarray):
            ax = [ax]

        for idx, p in enumerate(self.points):
            d = self.data[idx, 0, :]
            # First/last unmasked depth indices bound the plotted range.
            dlim = np.ma.flatnotmasked_edges(d[0, :])
            maxdepth = self.depths[dlim[1]].max()
            mindepth = self.depths[dlim[0]].min()

            c = ax[idx].pcolormesh(datenum,
                                   self.depths[:dlim[1] + 1],
                                   d[:, :dlim[1] + 1].transpose(),
                                   shading='gouraud',
                                   cmap=self.cmap,
                                   vmin=vmin,
                                   vmax=vmax)
            ax[idx].invert_yaxis()  # depth increases downward
            # Switch to a symlog depth axis once past the LINEAR threshold.
            if maxdepth > LINEAR:
                ax[idx].set_yscale('symlog', linthreshy=LINEAR)
            ax[idx].yaxis.set_major_formatter(ScalarFormatter())

            if maxdepth > LINEAR:
                # Round the lower limit up to the next power-of-ten multiple.
                l = 10**np.floor(np.log10(maxdepth))
                ax[idx].set_ylim(np.ceil(maxdepth / l) * l, mindepth)
                ax[idx].set_yticks(
                    list(ax[idx].get_yticks()) + [maxdepth, LINEAR])
            else:
                ax[idx].set_ylim(maxdepth, mindepth)
            ax[idx].set_ylabel("Depth (%s)" %
                               utils.mathtext(self.depth_unit))

            ax[idx].xaxis_date()
            ax[idx].set_xlim(datenum[0], datenum[-1])

            # Per-panel colorbar pinned to the panel's right edge.
            divider = make_axes_locatable(ax[idx])
            cax = divider.append_axes("right", size="5%", pad=0.05)
            bar = plt.colorbar(c, cax=cax)
            bar.set_label("%s (%s)" % (self.variable_name.title(),
                                       utils.mathtext(
                                           self.variable_unit)))
            ax[idx].set_title(
                "%s%s at %s" %
                (self.variable_name.title(), self.depth_label,
                 self.names[idx]))
            plt.setp(ax[idx].get_xticklabels(), rotation=30)
        fig.autofmt_xdate()
    else:
        # Create base figure
        figure_size = self.figuresize
        figure_size[0] *= 1.5 if self.showmap else 1.0
        fig = plt.figure(figsize=figure_size, dpi=self.dpi)

        # Setup figure layout
        width = 1
        if self.showmap:
            width += 1
            # Horizontally scale the actual plots by 2x the size of
            # the location map
            width_ratios = [1, 2]
        else:
            width_ratios = None

        # Create layout helper
        gs = gridspec.GridSpec(1, width, width_ratios=width_ratios)
        subplot = 0

        # Render point location
        if self.showmap:
            plt.subplot(gs[0, 0])
            subplot += 1
            utils.point_plot(
                np.array([
                    [x[0] for x in self.points],  # Latitudes
                    [x[1] for x in self.points]
                ]))  # Longitudes

        plt.subplot(gs[:, subplot])
        plt.plot_date(datenum,
                      np.squeeze(self.data),
                      fmt='-',
                      figure=fig,
                      xdate=True)
        plt.ylabel(
            f"{self.variable_name.title()} ({utils.mathtext(self.variable_unit)})",
            fontsize=14)
        plt.ylim(vmin, vmax)

        # Title
        if self.plotTitle is None or self.plotTitle == "":
            wrapped_title = wrap(
                "%s%s at %s" %
                (self.variable_name.title(), self.depth_label,
                 ", ".join(self.names)), 80)
            plt.title("\n".join(wrapped_title), fontsize=15)
        else:
            plt.title(self.plotTitle, fontsize=15)

        plt.gca().grid(True)
        fig.autofmt_xdate()

        self.plot_legend(fig, self.names)

    return super(TimeseriesPlotter, self).plot(fig)
def load_data(self):
    """Build the basemap/grid for the requested area and load all data
    needed by plot(): the main variable, optional quiver and contour
    variables, optional compare-dataset difference, and bathymetry.

    Side effects: sets self.basemap, self.longitude/self.latitude,
    self.data, self.quiver_* attributes, self.contour_*, self.timestamp,
    self.bathymetry, and self.depth_value_map.
    """
    # Plot extent in metres, padded 25% beyond the requested bounds.
    distance = VincentyDistance()
    height = distance.measure(
        (self.bounds[0], self.centroid[1]),
        (self.bounds[2], self.centroid[1])) * 1000 * 1.25
    width = distance.measure(
        (self.centroid[0], self.bounds[1]),
        (self.centroid[0], self.bounds[3])) * 1000 * 1.25

    if self.projection == 'EPSG:32661':  # north pole projection
        near_pole, covers_pole = self.pole_proximity(self.points[0])
        # NOTE(review): blat is computed but never used in this branch.
        blat = min(self.bounds[0], self.bounds[2])
        blat = 5 * np.floor(blat / 5)

        if self.centroid[0] > 80 or near_pole or covers_pole:
            self.basemap = basemap.load_map(
                'npstere', self.centroid, height, width,
                min(self.bounds[0], self.bounds[2]))
        else:
            self.basemap = basemap.load_map('lcc', self.centroid,
                                            height, width)
    elif self.projection == 'EPSG:3031':  # south pole projection
        near_pole, covers_pole = self.pole_proximity(self.points[0])
        # NOTE(review): blat is computed but never used in this branch.
        blat = max(self.bounds[0], self.bounds[2])
        blat = 5 * np.ceil(blat / 5)
        # is centerered close to the south pole
        if ((self.centroid[0] < -80 or self.bounds[1] < -80
             or self.bounds[3] < -80) or covers_pole) or near_pole:
            self.basemap = basemap.load_map(
                'spstere', self.centroid, height, width,
                max(self.bounds[0], self.bounds[2]))
        else:
            self.basemap = basemap.load_map('lcc', self.centroid,
                                            height, width)
    elif abs(self.centroid[1] - self.bounds[1]) > 90:
        # Very wide (more than a hemisphere) request: use Mercator with
        # explicitly buffered bounds instead of a conic projection.
        height_bounds = [self.bounds[0], self.bounds[2]]
        width_bounds = [self.bounds[1], self.bounds[3]]
        height_buffer = (abs(height_bounds[1] - height_bounds[0])) * 0.1
        width_buffer = (abs(width_bounds[0] - width_bounds[1])) * 0.1

        if abs(width_bounds[1] - width_bounds[0]) > 360:
            raise ClientError(
                gettext(
                    "You have requested an area that exceeds the width of the world. \
Thinking big is good but plots need to be less than 360 deg wide."))

        # NOTE(review): both branches of each if/else below are identical;
        # the sign checks currently have no effect.
        if height_bounds[1] < 0:
            height_bounds[1] = height_bounds[1] + height_buffer
        else:
            height_bounds[1] = height_bounds[1] + height_buffer
        if height_bounds[0] < 0:
            height_bounds[0] = height_bounds[0] - height_buffer
        else:
            height_bounds[0] = height_bounds[0] - height_buffer

        new_width_bounds = []
        new_width_bounds.append(width_bounds[0] - width_buffer)
        new_width_bounds.append(width_bounds[1] + width_buffer)

        # If buffering pushed the width past 360 deg, shrink the buffer
        # to whatever margin remains.
        if abs(new_width_bounds[1] - new_width_bounds[0]) > 360:
            width_buffer = np.floor(
                (360 - abs(width_bounds[1] - width_bounds[0])) / 2)
            new_width_bounds[0] = width_bounds[0] - width_buffer
            new_width_bounds[1] = width_bounds[1] + width_buffer

        if new_width_bounds[0] < -360:
            new_width_bounds[0] = -360
        if new_width_bounds[1] > 720:
            new_width_bounds[1] = 720

        self.basemap = basemap.load_map(
            'merc', self.centroid,
            (height_bounds[0], height_bounds[1]),
            (new_width_bounds[0], new_width_bounds[1]))
    else:
        self.basemap = basemap.load_map('lcc', self.centroid, height,
                                        width)

    # ~500-cell sampling grid, scaled by the map's aspect ratio.
    if self.basemap.aspect < 1:
        gridx = 500
        gridy = int(500 * self.basemap.aspect)
    else:
        gridy = 500
        gridx = int(500 / self.basemap.aspect)

    self.longitude, self.latitude = self.basemap.makegrid(gridx, gridy)

    # We don't want to change self.variables, so copy it.
    variables_to_load = self.variables[:]
    if self.__load_quiver():
        variables_to_load.append(self.quiver['variable'])

    with open_dataset(self.dataset_config,
                      variable=variables_to_load,
                      timestamp=self.time) as dataset:
        self.variable_unit = self.get_variable_units(
            dataset, self.variables)[0]
        self.variable_name = self.get_variable_names(
            dataset, self.variables)[0]
        if self.cmap is None:
            self.cmap = colormap.find_colormap(self.variable_name)

        if self.depth == 'bottom':
            depth_value_map = 'Bottom'
        else:
            self.depth = np.clip(int(self.depth), 0,
                                 len(dataset.depths) - 1)
            depth_value = dataset.depths[self.depth]
            depth_value_map = depth_value

        data = []
        var = dataset.variables[self.variables[0]]
        # Tabular exports also need the per-cell depth values.
        if self.filetype in ['csv', 'odv', 'txt']:
            d, depth_value_map = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                self.variables[0],
                self.interp,
                self.radius,
                self.neighbours,
                return_depth=True)
        else:
            d = dataset.get_area(
                np.array([self.latitude, self.longitude]), self.depth,
                self.time, self.variables[0], self.interp, self.radius,
                self.neighbours)

        data.append(d)
        if self.filetype not in ['csv', 'odv', 'txt']:
            if len(var.dimensions) == 3:
                self.depth_label = ""
            elif self.depth == 'bottom':
                self.depth_label = " at Bottom"
            else:
                self.depth_label = " at " + \
                    str(int(np.round(depth_value_map))) + " m"

        self.data = data[0]

        quiver_data = []
        # Store the quiver data on the same grid as the main variable. This
        # will only be used for CSV export.
        quiver_data_fullgrid = []

        if self.__load_quiver():
            var = dataset.variables[self.quiver['variable']]
            quiver_unit = self.dataset_config.variable[var].unit
            quiver_name = self.dataset_config.variable[var].name
            quiver_x_var = self.dataset_config.variable[
                var].east_vector_component
            quiver_y_var = self.dataset_config.variable[
                var].north_vector_component
            # Coarse 50x50 grid for arrow placement.
            quiver_lon, quiver_lat = self.basemap.makegrid(50, 50)
            x_vals = dataset.get_area(
                np.array([quiver_lat, quiver_lon]),
                self.depth,
                self.time,
                quiver_x_var,
                self.interp,
                self.radius,
                self.neighbours,
            )
            quiver_data.append(x_vals)
            y_vals = dataset.get_area(
                np.array([quiver_lat, quiver_lon]),
                self.depth,
                self.time,
                quiver_y_var,
                self.interp,
                self.radius,
                self.neighbours,
            )
            quiver_data.append(y_vals)
            mag_data = dataset.get_area(
                np.array([quiver_lat, quiver_lon]),
                self.depth,
                self.time,
                self.quiver['variable'],
                self.interp,
                self.radius,
                self.neighbours,
            )
            self.quiver_magnitude = mag_data

            # Get the quiver data on the same grid as the main
            # variable.
            x_vals = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                quiver_x_var,
                self.interp,
                self.radius,
                self.neighbours,
            )
            quiver_data_fullgrid.append(x_vals)
            y_vals = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                quiver_y_var,
                self.interp,
                self.radius,
                self.neighbours,
            )
            quiver_data_fullgrid.append(y_vals)

            self.quiver_name = self.get_variable_names(
                dataset, [self.quiver['variable']])[0]
            self.quiver_longitude = quiver_lon
            self.quiver_latitude = quiver_lat
            self.quiver_unit = quiver_unit
        self.quiver_data = quiver_data
        self.quiver_data_fullgrid = quiver_data_fullgrid

        # Surface-only variables force the depth index to 0.
        if all([
                dataset.variables[v].is_surface_only()
                for v in variables_to_load
        ]):
            self.depth = 0

        contour_data = []
        if self.contour is not None and \
            self.contour['variable'] != '' and \
                self.contour['variable'] != 'none':
            d = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                self.contour['variable'],
                self.interp,
                self.radius,
                self.neighbours,
            )
            vc = self.dataset_config.variable[self.contour['variable']]
            contour_unit = vc.unit
            contour_name = vc.name
            contour_data.append(d)
            self.contour_unit = contour_unit
            self.contour_name = contour_name

        self.contour_data = contour_data

        self.timestamp = dataset.nc_data.timestamp_to_iso_8601(self.time)

    if self.compare:
        # Difference plot: subtract the compare dataset's field.
        self.variable_name += " Difference"
        compare_config = DatasetConfig(self.compare['dataset'])
        with open_dataset(compare_config,
                          variable=self.compare['variables'],
                          timestamp=self.compare['time']) as dataset:
            data = []
            for v in self.compare['variables']:
                var = dataset.variables[v]
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.compare['depth'],
                    self.compare['time'],
                    v,
                    self.interp,
                    self.radius,
                    self.neighbours,
                )
                data.append(d)
            data = data[0]

            self.data -= data

    # Load bathymetry data
    self.bathymetry = overlays.bathymetry(self.basemap,
                                          self.latitude,
                                          self.longitude,
                                          blur=2)

    if self.depth != 'bottom' and self.depth != 0:
        # Mask cells where the sea floor is shallower than the plot depth.
        if quiver_data:
            quiver_bathymetry = overlays.bathymetry(
                self.basemap, quiver_lat, quiver_lon)

        self.data[np.where(
            self.bathymetry < depth_value_map)] = np.ma.masked
        for d in self.quiver_data:
            d[np.where(quiver_bathymetry < depth_value)] = np.ma.masked
        for d in self.contour_data:
            d[np.where(
                self.bathymetry < depth_value_map)] = np.ma.masked
    else:
        # Surface/bottom: mask land cells instead.
        mask = maskoceans(self.longitude, self.latitude, self.data, True,
                          'h', 1.25).mask
        self.data[~mask] = np.ma.masked
        for d in self.quiver_data:
            mask = maskoceans(self.quiver_longitude,
                              self.quiver_latitude, d).mask
            d[~mask] = np.ma.masked
        for d in contour_data:
            mask = maskoceans(self.longitude, self.latitude, d).mask
            d[~mask] = np.ma.masked

    # For tabular/raster exports, additionally mask everything outside
    # the requested area polygons.
    if self.area and self.filetype in ['csv', 'odv', 'txt', 'geotiff']:
        area_polys = []
        for a in self.area:
            rings = [LinearRing(p) for p in a['polygons']]
            innerrings = [LinearRing(p) for p in a['innerrings']]

            polygons = []
            for r in rings:
                inners = []
                for ir in innerrings:
                    if r.contains(ir):
                        inners.append(ir)

                polygons.append(Poly(r, inners))

            area_polys.append(MultiPolygon(polygons))

        points = [
            Point(p)
            for p in zip(self.latitude.ravel(), self.longitude.ravel())
        ]

        indicies = []
        for a in area_polys:
            indicies.append(
                np.where(
                    list(
                        map(lambda p, poly=a: poly.contains(p),
                            points)))[0])
        indicies = np.unique(np.array(indicies).ravel())

        newmask = np.ones(self.data.shape, dtype=bool)
        newmask[np.unravel_index(indicies, newmask.shape)] = False
        self.data.mask |= newmask

    self.depth_value_map = depth_value_map
def load_data(self):
    """Load the transect profile data for the left map, the optional
    surface track, the optional compare-map (right) profile, and the
    bathymetry along the requested path.

    Side effects: sets self.timestamp, self.depth, self.depth_unit,
    self.transect_data, self.surface_data (when a surface variable is
    requested), entries of self.compare (in compare mode), and
    self.bathymetry.
    """
    with open_dataset(self.dataset_config) as dataset:
        # Clamp the requested time index into the dataset's valid range.
        if self.time < 0:
            self.time += len(dataset.timestamps)
        time = np.clip(self.time, 0, len(dataset.timestamps) - 1)

        # If a selected variable has no depth dimension, substitute a
        # depth-aware variable with more than 3 dimensions from the same
        # dataset (NOTE(review): the last matching candidate wins).
        for idx, v in enumerate(self.variables):
            var = dataset.variables[v]
            if not (set(var.dimensions) & set(dataset.depth_dimensions)):
                for potential in dataset.variables:
                    if potential in self.variables:
                        continue
                    pot = dataset.variables[potential]
                    if set(pot.dimensions) & set(dataset.depth_dimensions):
                        if len(pot.dimensions) > 3:
                            self.variables[idx] = potential.key

        value = parallel = perpendicular = magnitude = None

        variable_names = self.get_variable_names(dataset, self.variables)
        variable_units = self.get_variable_units(dataset, self.variables)
        scale_factors = self.get_variable_scale_factors(
            dataset, self.variables)

        # Load data sent from primary/Left Map
        if len(self.variables) > 1:
            # Only velocity has 2 variables
            distances, times, lat, lon, bearings = geo.path_to_points(
                self.points, 100)
            transect_pts, distance, x, dep = dataset.get_path_profile(
                self.points, time, self.variables[0], 100)
            transect_pts, distance, y, dep = dataset.get_path_profile(
                self.points, time, self.variables[1], 100)

            # Calculate vector components
            x = np.multiply(x, scale_factors[0])
            y = np.multiply(y, scale_factors[1])

            # Rotate the east/north components into along-track
            # (parallel) and cross-track (perpendicular) components.
            r = np.radians(np.subtract(90, bearings))
            theta = np.arctan2(y, x) - r
            magnitude = np.sqrt(x**2 + y**2)
            parallel = magnitude * np.cos(theta)
            perpendicular = magnitude * np.sin(theta)
        else:
            # Get data for one variable
            transect_pts, distance, value, dep = \
                dataset.get_path_profile(self.points, time,
                                         self.variables[0])
            value = np.multiply(value, scale_factors[0])

        if len(self.variables) == 2:
            variable_names = [
                self.get_vector_variable_name(dataset, self.variables)
            ]
            variable_units = [
                self.get_vector_variable_unit(dataset, self.variables)
            ]

        # If a colourmap has not been manually specified by the
        # Navigator...
        if self.cmap is None:
            self.cmap = colormap.find_colormap(variable_names[0])

        self.timestamp = dataset.timestamps[int(time)]
        self.depth = dep
        self.depth_unit = "m"

        self.transect_data = {
            "points": transect_pts,
            "distance": distance,
            "data": value,
            "name": variable_names[0],
            "unit": variable_units[0],
            "parallel": parallel,
            "perpendicular": perpendicular,
            "magnitude": magnitude,
        }

        # Optional surface variable drawn above the transect panel.
        if self.surface is not None:
            surface_pts, surface_dist, t, surface_value = \
                dataset.get_path(
                    self.points,
                    0,
                    time,
                    self.surface,
                )
            vc = self.dataset_config.variable[
                dataset.variables[self.surface]]
            surface_unit = vc.unit
            surface_name = vc.name
            surface_factor = vc.scale_factor
            surface_value = np.multiply(surface_value, surface_factor)

            self.surface_data = {
                "config": vc,
                "points": surface_pts,
                "distance": surface_dist,
                "data": surface_value,
                "name": surface_name,
                "unit": surface_unit
            }

    # Load data sent from Right Map (if in compare mode)
    if self.compare:

        def interpolate_depths(data, depth_in, depth_out):
            # Re-sample each profile column onto the left map's depth axis.
            output = []
            for i in range(0, depth_in.shape[0]):
                f = interp1d(
                    depth_in[i],
                    data[:, i],
                    bounds_error=False,
                    assume_sorted=True,
                )
                output.append(
                    f(depth_out[i].view(np.ma.MaskedArray).filled()))
            return np.ma.masked_invalid(output).transpose()

        self.compare_config = DatasetConfig(self.compare['dataset'])
        with open_dataset(self.compare_config) as dataset:
            # Get and format date
            self.compare['date'] = np.clip(
                np.int64(self.compare['time']), 0,
                len(dataset.timestamps) - 1)
            self.compare['date'] = dataset.timestamps[int(
                self.compare['date'])]

            # 1 variable
            if len(self.compare['variables']) == 1:
                # Get and store the "nicely formatted" string for the
                # variable name
                self.compare['name'] = self.get_variable_names(
                    dataset, self.compare['variables'])[0]

                # Find correct colourmap
                if self.compare['colormap'] == 'default':
                    self.compare['colormap'] = colormap.find_colormap(
                        self.compare['name'])
                else:
                    self.compare['colormap'] = colormap.find_colormap(
                        self.compare['colormap'])

                climate_points, climate_distance, climate_data, cdep = \
                    dataset.get_path_profile(
                        self.points, self.compare['time'],
                        self.compare['variables'][0])

                self.compare['unit'] = dataset.variables[
                    self.compare['variables'][0]].unit
                self.__fill_invalid_shift(climate_data)

                if (self.depth.shape != cdep.shape) or \
                        (self.depth != cdep).any():
                    # Need to interpolate the depths
                    climate_data = interpolate_depths(
                        climate_data, cdep, self.depth)

                if self.transect_data['data'] is None:
                    self.transect_data['magnitude'] -= climate_data
                    self.transect_data['parallel'] -= climate_data
                    self.transect_data['perpendicular'] -= climate_data
                else:
                    self.transect_data['compare_data'] = climate_data
            # Velocity variables
            else:
                # Get and store the "nicely formatted" string for the
                # variable name
                self.compare['name'] = self.get_vector_variable_name(
                    dataset, self.compare['variables'])

                climate_pts, climate_distance, climate_x, cdep = \
                    dataset.get_path_profile(
                        self.points,
                        self.compare['time'],
                        self.compare['variables'][0],
                        100
                    )
                # BUG FIX: the y component must come from the SECOND
                # compare variable; this previously re-fetched
                # variables[0], so the rotation below used the x
                # component for both axes (compare the left-map branch
                # above, which uses variables[0] then variables[1]).
                climate_pts, climate_distance, climate_y, cdep = \
                    dataset.get_path_profile(
                        self.points,
                        self.compare['time'],
                        self.compare['variables'][1],
                        100
                    )

                climate_distances, ctimes, clat, clon, bearings = \
                    geo.path_to_points(self.points, 100)

                # Same along/cross-track rotation as the left map.
                r = np.radians(np.subtract(90, bearings))
                theta = np.arctan2(climate_y, climate_x) - r
                mag = np.sqrt(climate_x**2 + climate_y**2)

                if np.all(self.depth != cdep):
                    theta = interpolate_depths(theta, cdep, self.depth)
                    self.__fill_invalid_shift(theta)
                    mag = interpolate_depths(mag, cdep, self.depth)
                    self.__fill_invalid_shift(mag)

                self.compare['parallel'] = mag * np.cos(theta)
                self.compare['perpendicular'] = mag * np.sin(theta)

    # Bathymetry along the transect path.
    with Dataset(current_app.config['BATHYMETRY_FILE'], 'r') as dataset:
        bath_x, bath_y = bathymetry(dataset.variables['y'],
                                    dataset.variables['x'],
                                    dataset.variables['z'], self.points)

    self.bathymetry = {'x': bath_x, 'y': bath_y}
def scale(args):
    """Draw the vertical colour-scale legend that accompanies the map.

    Reads 'dataset', 'scale' ("min,max") and 'variable' from args; a
    variable name ending in '_anom' selects the anomaly colourmap and
    relabels the variable. Returns a BytesIO containing the rendered PNG.

    NOTE(review): this variant uses the legacy lookup helpers
    (get_dataset_url / get_variable_unit / get_variable_name) rather than
    the DatasetConfig-based approach used by the other scale() in this
    project.
    """
    dataset_name = args.get('dataset')
    scale = args.get('scale')
    scale = [float(component) for component in scale.split(',')]

    variable = args.get('variable')
    # A trailing '_anom' suffix requests an anomaly rendering.
    anom = False
    if variable.endswith('_anom'):
        variable = variable[0:-5]
        anom = True

    variable = variable.split(',')

    # Only unit/name metadata is needed from the dataset itself.
    with open_dataset(get_dataset_url(dataset_name)) as dataset:
        variable_unit = get_variable_unit(dataset_name,
                                          dataset.variables[variable[0]])
        variable_name = get_variable_name(dataset_name,
                                          dataset.variables[variable[0]])

    # Label-only substitution: the scale values themselves are not
    # converted here.
    if variable_unit.startswith("Kelvin"):
        variable_unit = "Celsius"

    if anom:
        cmap = colormap.colormaps['anomaly']
        variable_name = gettext("%s Anomaly") % variable_name
    else:
        cmap = colormap.find_colormap(variable_name)

    if len(variable) == 2:
        # Two components -> magnitude: use the speed colourmap (unless an
        # anomaly) and strip directional words from the variable name.
        if not anom:
            cmap = colormap.colormaps.get('speed')

        variable_name = re.sub(
            r"(?i)( x | y |zonal |meridional |northward |eastward )", " ",
            variable_name)
        variable_name = re.sub(r" +", " ", variable_name)

    fig = plt.figure(figsize=(2, 5), dpi=75)
    ax = fig.add_axes([0.05, 0.05, 0.25, 0.9])
    norm = matplotlib.colors.Normalize(vmin=scale[0], vmax=scale[1])

    # Scientific notation outside 1e-3 .. 1e4.
    formatter = ScalarFormatter()
    formatter.set_powerlimits((-3, 4))
    bar = ColorbarBase(ax, cmap=cmap, norm=norm,
                       orientation='vertical', format=formatter)
    bar.set_label("%s (%s)" % (variable_name.title(),
                               utils.mathtext(variable_unit)),
                  fontsize=12)
    # Increase tick font size
    bar.ax.tick_params(labelsize=12)

    buf = BytesIO()
    plt.savefig(buf, format='png', dpi='figure', transparent=False,
                bbox_inches='tight', pad_inches=0.05)
    plt.close(fig)

    buf.seek(0)  # Move buffer back to beginning
    return buf