def get_scale(dataset, variable, depth, time, projection, extent):
    x = np.linspace(extent[0], extent[2], 50)
    y = np.linspace(extent[1], extent[3], 50)
    xx, yy = np.meshgrid(x, y)

    dest = Proj(init=projection)
    lon, lat = dest(xx, yy, inverse=True)

    variables_anom = variable.split(",")
    variables = [re.sub('_anom$', '', v) for v in variables_anom]

    with open_dataset(get_dataset_url(dataset)) as ds:
        timestamp = ds.timestamps[time]
        d = ds.get_area(
            np.array([lat, lon]),
            depth,
            time,
            variables[0]
        )

        if len(variables) > 1:
            d0 = d
            d1 = ds.get_area(
                np.array([lat, lon]),
                depth,
                time,
                variables[1]
            )
            d = np.sqrt(d0 ** 2 + d1 ** 2)

        variable_unit = get_variable_unit(dataset,
                                          ds.variables[variables[0]])
        if variable_unit.startswith("Kelvin"):
            variable_unit = "Celsius"
            d = np.add(d, -273.15)

    if variables != variables_anom:
        with open_dataset(get_dataset_climatology(dataset), 'r') as ds:
            c = ds.get_area(
                np.array([lat, lon]),
                depth,
                timestamp.month - 1,
                variables[0]
            )

            if len(variables) > 1:
                c0 = c
                c1 = ds.get_area(
                    np.array([lat, lon]),
                    depth,
                    timestamp.month - 1,
                    variables[1]
                )
                c = np.sqrt(c0 ** 2 + c1 ** 2)

            d = d - c

            m = max(abs(d.min()), abs(d.max()))
            return -m, m

    return d.min(), d.max()
def get_point_data(dataset, variable, time, depth, location):
    variables_anom = variable.split(",")
    variables = [re.sub('_anom$', '', v) for v in variables_anom]

    data = []
    names = []
    units = []
    with open_dataset(get_dataset_url(dataset)) as ds:
        timestamp = ds.timestamps[time]
        for v in variables:
            d = ds.get_point(
                location[0],
                location[1],
                depth,
                time,
                v
            )
            variable_name = get_variable_name(dataset, ds.variables[v])
            variable_unit = get_variable_unit(dataset, ds.variables[v])

            if variable_unit.startswith("Kelvin"):
                variable_unit = "Celsius"
                d = np.add(d, -273.15)

            data.append(d)
            names.append(variable_name)
            units.append(variable_unit)

    if variables != variables_anom:
        with open_dataset(get_dataset_climatology(dataset)) as ds:
            for idx, v in enumerate(variables):
                # Climatologies are indexed by zero-based month at every
                # other call site, so subtract one here as well (the
                # original passed timestamp.month, an off-by-one).
                d = ds.get_point(
                    location[0],
                    location[1],
                    depth,
                    timestamp.month - 1,
                    v
                )
                data[idx] = data[idx] - d
                names[idx] = names[idx] + " Anomaly"

    result = {
        # List comprehensions instead of map() so the values are real
        # lists (and JSON-serializable) under Python 3.
        'value': ['%s' % float('%.4g' % f) for f in data],
        'location': [round(f, 4) for f in location],
        'name': names,
        'units': units,
    }
    return result
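A minimal usage sketch for get_point_data; the dataset key, variable name, and index values below are illustrative assumptions, not part of the original code:

# Hypothetical call; dataset keys, variable names and index ranges all
# depend on the deployment's dataset configuration.
result = get_point_data(
    'giops_day',      # dataset key (assumed)
    'votemper',       # 'votemper_anom' would also subtract the climatology
    0,                # index into ds.timestamps
    0,                # depth level index
    (47.5, -52.7),    # (latitude, longitude)
)
print(result['name'], result['value'], result['units'])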
def subtract_climatology(self, data, timestamp):
    if self.variables != self.variables_anom:
        with Dataset(
            get_dataset_climatology(self.dataset_name), 'r'
        ) as dataset:
            cli = self.get_data(
                dataset, self.variables, timestamp.month - 1
            )

        for idx, v in enumerate(self.variables):
            if v != self.variables_anom[idx]:
                data[:, idx, :] = \
                    data[:, idx, :] - cli[:, idx, :]

    return data
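The month - 1 argument reflects that timestamp.month is 1-based while the climatology is indexed 0-11. A toy demonstration of the per-variable subtraction over an assumed (time, variable, point) array layout:

import numpy as np

# Toy shapes: 2 timesteps, 2 variables, 3 points.
data = np.arange(12, dtype=float).reshape(2, 2, 3)
cli = np.ones((2, 2, 3))

# Subtract the climatology for variable index 1 only, mirroring the
# idx-wise loop in subtract_climatology.
data[:, 1, :] = data[:, 1, :] - cli[:, 1, :]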
def get_scale(dataset, variable, depth, time, projection, extent,
              interp, radius, neighbours):
    x = np.linspace(extent[0], extent[2], 50)
    y = np.linspace(extent[1], extent[3], 50)
    xx, yy = np.meshgrid(x, y)

    dest = Proj(init=projection)
    lon, lat = dest(xx, yy, inverse=True)

    variables_anom = variable.split(",")
    variables = [re.sub('_anom$', '', v) for v in variables_anom]

    with open_dataset(get_dataset_url(dataset)) as ds:
        timestamp = ds.timestamps[time]
        d = ds.get_area(np.array([lat, lon]),
                        depth,
                        time,
                        variables[0],
                        interp,
                        radius,
                        neighbours)

        if len(variables) > 1:
            d0 = d
            d1 = ds.get_area(np.array([lat, lon]),
                             depth,
                             time,
                             variables[1],
                             interp,
                             radius,
                             neighbours)
            d = np.sqrt(d0 ** 2 + d1 ** 2)

        variable_unit = get_variable_unit(dataset,
                                          ds.variables[variables[0]])
        if variable_unit.startswith("Kelvin"):
            variable_unit = "Celsius"
            d = np.add(d, -273.15)

    if variables != variables_anom:
        with open_dataset(get_dataset_climatology(dataset), 'r') as ds:
            c = ds.get_area(np.array([lat, lon]),
                            depth,
                            timestamp.month - 1,
                            variables[0],
                            interp,
                            radius,
                            neighbours)

            if len(variables) > 1:
                c0 = c
                c1 = ds.get_area(np.array([lat, lon]),
                                 depth,
                                 timestamp.month - 1,
                                 variables[1],
                                 interp,
                                 radius,
                                 neighbours)
                c = np.sqrt(c0 ** 2 + c1 ** 2)

            d = d - c

            m = max(abs(d.min()), abs(d.max()))
            return -m, m

    return d.min(), d.max()
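Both get_scale variants return a (min, max) pair for colour-bar limits, symmetric about zero for anomaly variables. A hedged sketch of a call against the extended signature; every argument value is a placeholder, and the interpolation settings are only plausible examples:

# Placeholder values throughout.
vmin, vmax = get_scale(
    'giops_day',              # dataset key (assumed)
    'votemper_anom',          # '_anom' triggers climatology subtraction
    0,                        # depth level index
    0,                        # time index
    'EPSG:3857',              # map projection
    (-7e6, 4e6, -4e6, 8e6),   # extent (x0, y0, x1, y1) in projection units
    'gaussian',               # interpolation method (assumed)
    25000,                    # search radius in metres (assumed)
    10,                       # number of neighbours (assumed)
)
# For anomalies vmin == -vmax, so the colour bar is centred on zero.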
def test_get_dataset_misc(self, m):
    m.return_value = {
        "dataset": {
            "url": "the_url",
            "attribution": "My attribution <b>bold</b>",
            "climatology": "climatology_url",
            "cache": 5,
        }
    }

    self.assertEqual(util.get_dataset_url("dataset"), "the_url")
    self.assertEqual(
        util.get_dataset_climatology("dataset"), "climatology_url")
    self.assertEqual(
        util.get_dataset_attribution("dataset"), "My attribution bold")
    self.assertEqual(util.get_dataset_cache("dataset"), 5)

    m.return_value = {
        "dataset2": {
        }
    }
    self.assertEqual(util.get_dataset_cache("dataset2"), None)
def load_data(self):
    if self.projection == 'EPSG:32661':
        blat = min(self.bounds[0], self.bounds[2])
        blat = 5 * np.floor(blat / 5)
        self.basemap = basemap.load_map('npstere', (blat, 0), None, None)
    elif self.projection == 'EPSG:3031':
        blat = max(self.bounds[0], self.bounds[2])
        blat = 5 * np.ceil(blat / 5)
        self.basemap = basemap.load_map('spstere', (blat, 180), None, None)
    else:
        distance = VincentyDistance()
        height = distance.measure(
            (self.bounds[0], self.centroid[1]),
            (self.bounds[2], self.centroid[1])
        ) * 1000 * 1.25
        width = distance.measure(
            (self.centroid[0], self.bounds[1]),
            (self.centroid[0], self.bounds[3])
        ) * 1000 * 1.25
        self.basemap = basemap.load_map(
            'lcc', self.centroid, height, width
        )

    if self.basemap.aspect < 1:
        gridx = 500
        gridy = int(500 * self.basemap.aspect)
    else:
        gridy = 500
        gridx = int(500 / self.basemap.aspect)

    self.longitude, self.latitude = self.basemap.makegrid(gridx, gridy)

    with open_dataset(get_dataset_url(self.dataset_name)) as dataset:
        if self.time < 0:
            self.time += len(dataset.timestamps)
        self.time = np.clip(self.time, 0, len(dataset.timestamps) - 1)

        self.variable_unit = self.get_variable_units(
            dataset, self.variables
        )[0]
        self.variable_name = self.get_variable_names(
            dataset, self.variables
        )[0]
        scale_factor = self.get_variable_scale_factors(
            dataset, self.variables
        )[0]

        if self.cmap is None:
            if len(self.variables) == 1:
                self.cmap = colormap.find_colormap(self.variable_name)
            else:
                self.cmap = colormap.colormaps.get('speed')

        if len(self.variables) == 2:
            self.variable_name = self.vector_name(self.variable_name)

        if self.depth == 'bottom':
            depth_value = 'Bottom'
        else:
            self.depth = np.clip(
                int(self.depth), 0, len(dataset.depths) - 1)
            depth_value = dataset.depths[self.depth]

        data = []
        allvars = []
        for v in self.variables:
            var = dataset.variables[v]
            allvars.append(v)
            if self.filetype in ['csv', 'odv', 'txt']:
                d, depth_value = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.depth,
                    self.time,
                    v,
                    return_depth=True
                )
            else:
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.depth,
                    self.time,
                    v
                )

            d = np.multiply(d, scale_factor)
            self.variable_unit, d = self.kelvin_to_celsius(
                self.variable_unit, d)
            data.append(d)

            if self.filetype not in ['csv', 'odv', 'txt']:
                if len(var.dimensions) == 3:
                    self.depth_label = ""
                elif self.depth == 'bottom':
                    self.depth_label = " at Bottom"
                else:
                    self.depth_label = " at " + \
                        str(int(np.round(depth_value))) + " m"

        if len(data) == 2:
            data[0] = np.sqrt(data[0] ** 2 + data[1] ** 2)

        self.data = data[0]

        quiver_data = []
        if self.quiver is not None and \
            self.quiver['variable'] != '' and \
                self.quiver['variable'] != 'none':
            for v in self.quiver['variable'].split(','):
                allvars.append(v)
                var = dataset.variables[v]
                quiver_unit = get_variable_unit(self.dataset_name, var)
                quiver_name = get_variable_name(self.dataset_name, var)
                quiver_lon, quiver_lat = self.basemap.makegrid(50, 50)
                d = dataset.get_area(
                    np.array([quiver_lat, quiver_lon]),
                    self.depth,
                    self.time,
                    v
                )
                quiver_data.append(d)

            self.quiver_name = self.vector_name(quiver_name)
            self.quiver_longitude = quiver_lon
            self.quiver_latitude = quiver_lat
            self.quiver_unit = quiver_unit
        self.quiver_data = quiver_data

        # A generator expression instead of map(); the intent is the
        # same, but it stays correct under Python 3's lazy map.
        if all(len(dataset.variables[v].dimensions) == 3
               for v in allvars):
            self.depth = 0

        contour_data = []
        if self.contour is not None and \
            self.contour['variable'] != '' and \
                self.contour['variable'] != 'none':
            d = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                self.contour['variable']
            )
            contour_unit = get_variable_unit(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_name = get_variable_name(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_factor = get_variable_scale_factor(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_unit, d = self.kelvin_to_celsius(contour_unit, d)
            d = np.multiply(d, contour_factor)
            contour_data.append(d)
            self.contour_unit = contour_unit
            self.contour_name = contour_name

        self.contour_data = contour_data

        self.timestamp = dataset.timestamps[self.time]

    if self.variables != self.variables_anom:
        self.variable_name += " Anomaly"
        with open_dataset(
            get_dataset_climatology(self.dataset_name)
        ) as dataset:
            data = []
            for v in self.variables:
                var = dataset.variables[v]
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.depth,
                    self.timestamp.month - 1,
                    v
                )
                data.append(d)

            if len(data) == 2:
                data = np.sqrt(data[0] ** 2 + data[1] ** 2)
            else:
                data = data[0]

            u, data = self.kelvin_to_celsius(
                dataset.variables[self.variables[0]].unit,
                data)
            self.data -= data

    # Load bathymetry data
    self.bathymetry = overlays.bathymetry(
        self.basemap,
        self.latitude,
        self.longitude,
        blur=2
    )

    if self.depth != 'bottom' and self.depth != 0:
        if len(quiver_data) > 0:
            quiver_bathymetry = overlays.bathymetry(
                self.basemap, quiver_lat, quiver_lon)

        self.data[np.where(self.bathymetry < depth_value)] = np.ma.masked
        for d in self.quiver_data:
            d[np.where(quiver_bathymetry < depth_value)] = np.ma.masked
        for d in self.contour_data:
            d[np.where(self.bathymetry < depth_value)] = np.ma.masked
    else:
        mask = maskoceans(self.longitude, self.latitude, self.data).mask
        self.data[~mask] = np.ma.masked
        for d in self.quiver_data:
            mask = maskoceans(
                self.quiver_longitude, self.quiver_latitude, d).mask
            d[~mask] = np.ma.masked
        for d in contour_data:
            mask = maskoceans(self.longitude, self.latitude, d).mask
            d[~mask] = np.ma.masked

    if self.area and self.filetype in ['csv', 'odv', 'txt', 'geotiff']:
        area_polys = []
        for a in self.area:
            rings = [LinearRing(p) for p in a['polygons']]
            innerrings = [LinearRing(p) for p in a['innerrings']]

            polygons = []
            for r in rings:
                inners = []
                for ir in innerrings:
                    if r.contains(ir):
                        inners.append(ir)

                polygons.append(Poly(r, inners))

            area_polys.append(MultiPolygon(polygons))

        points = [Point(p)
                  for p in zip(self.latitude.ravel(),
                               self.longitude.ravel())]

        indices = []
        for a in area_polys:
            # Materialize the containment tests as a list; np.where()
            # cannot consume a lazy map object under Python 3.
            indices.append(np.where(
                [a.contains(p) for p in points]
            )[0])
        indices = np.unique(np.array(indices).ravel())
        newmask = np.ones(self.data.shape, dtype=bool)
        newmask[np.unravel_index(indices, newmask.shape)] = False
        self.data.mask |= newmask

    self.depth_value = depth_value
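The area masking above hinges on point-in-polygon tests over the flattened grid (the original aliases shapely's Polygon as Poly). A tiny self-contained sketch of the same pattern; the triangle and test points are made up:

import numpy as np
from shapely.geometry import LinearRing, Point, Polygon

ring = LinearRing([(0, 0), (0, 2), (2, 0)])  # made-up triangle
poly = Polygon(ring)

points = [Point(0.5, 0.5), Point(5, 5)]
# A concrete boolean list, so np.where sees a sequence.
inside = np.where([poly.contains(p) for p in points])[0]
print(inside)  # -> [0]: only the first point falls inside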
def vars_query():
    data = []
    if 'dataset' in request.args:
        dataset = request.args['dataset']

        if get_dataset_climatology(dataset) != "" and 'anom' in request.args:
            with open_dataset(get_dataset_climatology(dataset)) as ds:
                # A real list rather than map(): membership tests below
                # would exhaust a lazy map object after the first check.
                climatology_variables = [str(v) for v in ds.variables]
        else:
            climatology_variables = []

        three_d = '3d_only' in request.args
        with open_dataset(get_dataset_url(dataset)) as ds:
            if 'vectors_only' not in request.args:
                for v in ds.variables:
                    if ('time_counter' in v.dimensions or
                        'time' in v.dimensions) \
                        and ('y' in v.dimensions or
                             'yc' in v.dimensions or
                             'node' in v.dimensions or
                             'nele' in v.dimensions or
                             'latitude' in v.dimensions or
                             'lat' in v.dimensions):
                        if three_d and not (
                            set(ds.depth_dimensions) & set(v.dimensions)
                        ):
                            continue
                        else:
                            if not is_variable_hidden(dataset, v):
                                data.append({
                                    'id': v.key,
                                    'value': get_variable_name(dataset, v),
                                    'scale': get_variable_scale(dataset, v)
                                })
                                if v.key in climatology_variables:
                                    data.append({
                                        'id': v.key + "_anom",
                                        'value': get_variable_name(
                                            dataset, v) + " Anomaly",
                                        'scale': [-10, 10]
                                    })

            VECTOR_MAP = {
                'vozocrtx': 'vozocrtx,vomecrty',
                'itzocrtx': 'itzocrtx,itmecrty',
                'iicevelu': 'iicevelu,iicevelv',
                'u_wind': 'u_wind,v_wind',
                'u': 'u,v',
                'ua': 'ua,va',
                'u-component_of_wind_height_above_ground':
                    'u-component_of_wind_height_above_ground,'
                    'v-component_of_wind_height_above_ground'
            }

            if 'vectors' in request.args or 'vectors_only' in request.args:
                rxp = r"(?i)( x | y |zonal |meridional |northward |eastward)"

                # .items() replaces the Python 2-only .iteritems()
                for key, value in VECTOR_MAP.items():
                    if key in ds.variables:
                        n = get_variable_name(dataset, ds.variables[key])
                        data.append({
                            'id': value,
                            'value': re.sub(r" +", " ", re.sub(rxp, " ", n)),
                            'scale': [
                                0,
                                get_variable_scale(
                                    dataset, ds.variables[key])[1]
                            ]
                        })

    data = sorted(data, key=lambda k: k['value'])
    resp = jsonify(data)
    return resp
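The vector entries reuse the display name of the x-component and strip the directional words with the rxp pattern; a small standalone demonstration of that substitution, with a hypothetical component name:

import re

# Directional tokens are removed so that, e.g., "Water X Velocity" and
# "Water Y Velocity" collapse to a single combined label.
rxp = r"(?i)( x | y |zonal |meridional |northward |eastward)"
n = "Water X Velocity"  # hypothetical x-component display name
print(re.sub(r" +", " ", re.sub(rxp, " ", n)))  # prints "Water Velocity"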
def load_data(self):
    with open_dataset(get_dataset_url(self.dataset_name)) as dataset:
        if self.time < 0:
            self.time += len(dataset.timestamps)
        time = np.clip(self.time, 0, len(dataset.timestamps) - 1)

        for idx, v in enumerate(self.variables):
            var = dataset.variables[v]
            if not (set(var.dimensions) & set(dataset.depth_dimensions)):
                for potential in dataset.variables:
                    if potential in self.variables:
                        continue
                    pot = dataset.variables[potential]
                    if (set(pot.dimensions) &
                            set(dataset.depth_dimensions)):
                        if len(pot.shape) > 3:
                            self.variables[idx] = potential
                            self.variables_anom[idx] = potential

        value = parallel = perpendicular = None

        variable_names = self.get_variable_names(dataset, self.variables)
        variable_units = self.get_variable_units(dataset, self.variables)
        scale_factors = self.get_variable_scale_factors(
            dataset, self.variables)

        if len(self.variables) > 1:
            v = []
            for name in self.variables:
                v.append(dataset.variables[name])

            distances, times, lat, lon, bearings = geo.path_to_points(
                self.points, 100
            )
            transect_pts, distance, x, dep = dataset.get_path_profile(
                self.points, time, self.variables[0], 100)
            transect_pts, distance, y, dep = dataset.get_path_profile(
                self.points, time, self.variables[1], 100)

            x = np.multiply(x, scale_factors[0])
            y = np.multiply(y, scale_factors[1])

            # Rotate the vector components into along-track (parallel)
            # and cross-track (perpendicular) components.
            r = np.radians(np.subtract(90, bearings))
            theta = np.arctan2(y, x) - r
            mag = np.sqrt(x ** 2 + y ** 2)

            parallel = mag * np.cos(theta)
            perpendicular = mag * np.sin(theta)
        else:
            transect_pts, distance, value, dep = dataset.get_path_profile(
                self.points, time, self.variables[0])
            value = np.multiply(value, scale_factors[0])

        variable_units[0], value = self.kelvin_to_celsius(
            variable_units[0], value
        )

        if len(self.variables) == 2:
            variable_names[0] = self.vector_name(variable_names[0])

        if self.cmap is None:
            self.cmap = colormap.find_colormap(variable_names[0])

        self.timestamp = dataset.timestamps[int(time)]

        self.depth = dep
        self.depth_unit = "m"

        self.transect_data = {
            "points": transect_pts,
            "distance": distance,
            "data": value,
            "name": variable_names[0],
            "unit": variable_units[0],
            "parallel": parallel,
            "perpendicular": perpendicular,
        }

        if self.surface is not None:
            surface_pts, surface_dist, t, surface_value = \
                dataset.get_path(
                    self.points,
                    0,
                    time,
                    self.surface,
                )
            surface_unit = get_variable_unit(
                self.dataset_name,
                dataset.variables[self.surface]
            )
            surface_name = get_variable_name(
                self.dataset_name,
                dataset.variables[self.surface]
            )
            surface_factor = get_variable_scale_factor(
                self.dataset_name,
                dataset.variables[self.surface]
            )
            surface_value = np.multiply(surface_value, surface_factor)
            surface_unit, surface_value = self.kelvin_to_celsius(
                surface_unit,
                surface_value
            )

            self.surface_data = {
                "points": surface_pts,
                "distance": surface_dist,
                "data": surface_value,
                "name": surface_name,
                "unit": surface_unit
            }

    if self.variables != self.variables_anom:
        with open_dataset(
            get_dataset_climatology(self.dataset_name)
        ) as dataset:
            if self.variables[0] in dataset.variables:
                if len(self.variables) == 1:
                    climate_points, climate_distance, climate_data = \
                        dataset.get_path_profile(
                            self.points,
                            self.timestamp.month - 1,
                            self.variables[0])
                    u, climate_data = self.kelvin_to_celsius(
                        dataset.variables[self.variables[0]].unit,
                        climate_data
                    )
                    # Subtract the climatology to get the anomaly; the
                    # original had a double negative ("-= -") here,
                    # which added the climatology instead.
                    self.transect_data['data'] -= climate_data
                else:
                    climate_pts, climate_distance, climate_x, cdep = \
                        dataset.get_path_profile(
                            self.points,
                            self.timestamp.month - 1,
                            self.variables[0],
                            100
                        )
                    # The second component comes from the second
                    # variable; the original fetched variables[0] twice,
                    # which looks like a copy-paste slip.
                    climate_pts, climate_distance, climate_y, cdep = \
                        dataset.get_path_profile(
                            self.points,
                            self.timestamp.month - 1,
                            self.variables[1],
                            100
                        )

                    climate_distances, ctimes, clat, clon, bearings = \
                        geo.path_to_points(self.points, 100)
                    r = np.radians(np.subtract(90, bearings))
                    # Rotate the climatology components; the original
                    # reused the model-data x and y here.
                    theta = np.arctan2(climate_y, climate_x) - r
                    mag = np.sqrt(climate_x ** 2 + climate_y ** 2)

                    climate_parallel = mag * np.cos(theta)
                    climate_perpendicular = mag * np.sin(theta)

                    self.transect_data['parallel'] -= climate_parallel
                    self.transect_data['perpendicular'] -= \
                        climate_perpendicular

    # Bathymetry
    with Dataset(app.config['BATHYMETRY_FILE'], 'r') as dataset:
        bath_x, bath_y = bathymetry(
            dataset.variables['y'],
            dataset.variables['x'],
            dataset.variables['z'],
            self.points)

    self.bathymetry = {
        'x': bath_x,
        'y': bath_y
    }
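The anomaly rotation mirrors the along-track/cross-track decomposition used for the model data. A standalone check of that arithmetic with made-up numbers:

import numpy as np

# A current of (x, y) = (1, 0) m/s (due east) along a track bearing 90
# degrees (also due east) should be entirely along-track.
x, y = 1.0, 0.0
bearing = 90.0

r = np.radians(90 - bearing)         # bearing in math-angle convention
theta = np.arctan2(y, x) - r
mag = np.hypot(x, y)

parallel = mag * np.cos(theta)       # -> 1.0 (along-track)
perpendicular = mag * np.sin(theta)  # -> 0.0 (cross-track)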
def plot(projection, x, y, z, args):
    lat, lon = get_latlon_coords(projection, x, y, z)
    if len(lat.shape) == 1:
        lat, lon = np.meshgrid(lat, lon)

    dataset_name = args.get('dataset')
    variable = args.get('variable')
    if variable.endswith('_anom'):
        variable = variable[0:-5]
        anom = True
    else:
        anom = False

    variable = variable.split(',')

    depth = args.get('depth')

    scale = args.get('scale')
    scale = [float(component) for component in scale.split(',')]

    data = []
    with open_dataset(get_dataset_url(dataset_name)) as dataset:
        if args.get('time') is None or (type(args.get('time')) == str and
                                        len(args.get('time')) == 0):
            time = -1
        else:
            time = int(args.get('time'))

        t_len = len(dataset.timestamps)
        while time >= t_len:
            time -= t_len
        while time < 0:
            time += len(dataset.timestamps)
        timestamp = dataset.timestamps[time]

        for v in variable:
            data.append(dataset.get_area(
                np.array([lat, lon]),
                depth,
                time,
                v
            ))

        variable_name = get_variable_name(dataset_name,
                                          dataset.variables[variable[0]])
        variable_unit = get_variable_unit(dataset_name,
                                          dataset.variables[variable[0]])
        scale_factor = get_variable_scale_factor(
            dataset_name,
            dataset.variables[variable[0]]
        )

        if anom:
            cmap = colormap.colormaps['anomaly']
        else:
            cmap = colormap.find_colormap(variable_name)

        if depth != 'bottom':
            depthm = dataset.depths[depth]
        else:
            depthm = 0

        if scale_factor != 1.0:
            for idx, val in enumerate(data):
                data[idx] = np.multiply(val, scale_factor)

        if variable_unit.startswith("Kelvin"):
            variable_unit = "Celsius"
            for idx, val in enumerate(data):
                data[idx] = np.add(val, -273.15)

    if len(data) == 1:
        data = data[0]

    if len(data) == 2:
        data = np.sqrt(data[0] ** 2 + data[1] ** 2)
        if not anom:
            cmap = colormap.colormaps.get('speed')

    if anom:
        with open_dataset(get_dataset_climatology(dataset_name)) as dataset:
            # Note: this reuses `v` from the loop above, so only the
            # last component's climatology is subtracted.
            a = dataset.get_area(
                np.array([lat, lon]),
                depth,
                timestamp.month - 1,
                v
            )
            data = data - a

    f, fname = tempfile.mkstemp()
    os.close(f)

    data = data.transpose()
    xpx = x * 256
    ypx = y * 256

    with Dataset(ETOPO_FILE % (projection, z), 'r') as dataset:
        bathymetry = dataset["z"][ypx:(ypx + 256), xpx:(xpx + 256)]

    bathymetry = gaussian_filter(bathymetry, 0.5)

    data[np.where(bathymetry > -depthm)] = np.ma.masked

    sm = matplotlib.cm.ScalarMappable(
        matplotlib.colors.Normalize(vmin=scale[0], vmax=scale[1]),
        cmap=cmap)
    img = sm.to_rgba(np.squeeze(data))
    im = Image.fromarray((img * 255.0).astype(np.uint8))
    im.save(fname, format='png', optimize=True)

    # PNG output is binary; the original opened the file in text mode.
    with open(fname, 'rb') as f:
        buf = f.read()
        os.remove(fname)

    return buf
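A hedged example of rendering one web-map tile with plot; the args keys follow the args.get calls above, and every value is a placeholder (ETOPO_FILE, projections and dataset keys come from the application's configuration):

# All values are illustrative.
png_bytes = plot(
    'EPSG:3857',     # projection
    3, 2, 4,         # slippy-map tile coordinates x, y, z
    {
        'dataset': 'giops_day',   # dataset key (assumed)
        'variable': 'votemper',   # variable name (assumed)
        'depth': 0,
        'scale': '-5,30',         # colour-bar min,max
        'time': '0',
    },
)
with open('tile.png', 'wb') as out:
    out.write(png_bytes)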