def _load_ds(self, _id):
    """Load the dataset for ``_id`` and prepare an ``Oceans`` variable.

    If no ``sub_basins`` revision exists yet, the bathymetry file is loaded
    and seeded with default ocean values; otherwise the saved basin masks are
    combined into a single ``Oceans`` field.  Lat/lon data variables are
    promoted to coordinates and curvilinear grids get an ij index.

    Sets ``self.ds``, ``self._original_ds``, ``self._lat_lon_ori``,
    ``self.curvilinear_coordinates`` and the ``self.attribute`` widget.

    Returns:
        True on success.

    Raises:
        ValueError: if the coordinate variables are neither 1-D nor 2-D.
    """
    self.loaded = False
    with self.app.app_context():
        ds_sub_basins = load_file(_id, "sub_basins")  # load sub_basins file
    # FIX: use an identity check instead of `type(x) == type(None)`.
    if ds_sub_basins is None:
        # sub_basins file has not been created yet: initiate with default config.
        with self.app.app_context():
            ds = load_file(_id, self.file_type)
        ds.Bathymetry.values = self._default_ocean_values(
            ds.Bathymetry.values, ds.Bathymetry.shape)
        ds = ds.rename({"Bathymetry": "Oceans"})
    else:
        # sub_basins exists: re-encode pac -> 2 and ind -> 3 so the summed
        # mask distinguishes basins (atl presumably stays 1 — from atlmsk).
        ds_sub_basins["pacmsk"] = (
            ("y", "x"), numpy.where(ds_sub_basins.pacmsk == 1, 2, 0))
        ds_sub_basins["indmsk"] = (
            ("y", "x"), numpy.where(ds_sub_basins.indmsk == 1, 3, 0))
        ds = xr.Dataset({})
        ds["Oceans"] = (ds_sub_basins.atlmsk + ds_sub_basins.pacmsk
                        + ds_sub_basins.indmsk).astype(numpy.float64)
        ds["nav_lon"] = ds_sub_basins.navlon
        ds["nav_lat"] = ds_sub_basins.navlat
    # If lat and lon are in variables, move them to coords.
    d = {}
    for var in ds.data_vars:
        if "lat" in var.lower() or "lon" in var.lower():
            d[var] = var
    ds = ds.set_coords(d)
    self._lat_lon_ori = d
    self.curvilinear_coordinates = None
    number_coordinates_in_system = len(
        list(ds.coords.variables.values())[0].dims)
    if number_coordinates_in_system == 1:
        # Standard grid: nothing more to do.
        pass
    elif number_coordinates_in_system == 2:
        # Curvilinear coordinates.
        dims = list(ds[list(ds.coords)[0]].dims)
        # Store the true coordinates for export.
        self.curvilinear_coordinates = list(ds.coords)
        # Add the dimensions into the coordinates; this yields ij indexing.
        ds.coords[dims[0]] = ds[dims[0]]
        ds.coords[dims[1]] = ds[dims[1]]
        # Remove the curvilinear coordinates from the original coordinates.
        ds = ds.reset_coords()
    else:
        raise ValueError("Unknown number of Coordinates")
    self.ds = ds
    attributes = list(ds.keys())
    self.attribute.options = attributes
    self.attribute.value = attributes[0]
    self._original_ds = ds.copy(deep=True)
    self.loaded = True
    return True
def routing(body):
    """Run the routing routines for a data file and save the resulting files.

    ``body["next_step_only"]`` suppresses saving the bathymetry revision.
    """
    flask_app = create_app()
    topo_var_name = body["topo_var"]
    data_file_id = body["id"]
    # Decide which outputs are persisted; only bathy is ever skipped.
    save_bathy = not body.get("next_step_only", False)
    save_routing = save_soils = save_topo_high_res = True
    # Load the input files within an application context.
    with flask_app.app_context():
        raw_ds = load_file(data_file_id, "raw")
        orca_ds = load_file(data_file_id, "paleorca")
        lon_name, lat_name = get_lon_lat_names(data_file_id)
    lat_values = raw_ds[lat_name].values
    topo_values = raw_ds[topo_var_name].values
    ds_routing, ds_bathy, ds_soils, ds_topo_high_res = run_routines(
        topo_values, lat_values, orca_ds)
    # Persist the requested revisions.
    with flask_app.app_context():
        if save_routing:
            save_revision(data_file_id, ds_routing, "routing")
        if save_bathy:
            save_revision(data_file_id, ds_bathy, "bathy")
        if save_soils:
            save_revision(data_file_id, ds_soils, "soils")
        if save_topo_high_res:
            save_revision(data_file_id, ds_topo_high_res, "topo_high_res")
def pft(body):
    """Generate and save a PFT (plant functional type) file.

    ``body["data"]`` is a JSON payload whose ``dataArray`` holds rows of
    14 values: the first column is a cutoff latitude, the next 13 columns
    are PFT fractions for that band.
    """
    app = create_app()
    _id = body["id"]
    data = json.loads(body["data"])
    resp_array = numpy.array(data["dataArray"])
    # Make sure the data array is in the expected format:
    # first col = cutoff latitudes, next 13 cols are pft types.
    # BUG FIX: original was `assert len(resp_array[0] == 14)`, which asserted
    # the truthiness of the column count (len of a boolean comparison array),
    # not that each row has 14 entries.
    assert len(resp_array[0]) == 14
    pft_values = resp_array[:, 1:]
    latitudes = resp_array[:, 0]
    # Make sure 90 is the last value.
    assert latitudes[-1] == 90
    # Load routing file with final topography.
    with app.app_context():
        ds = load_file(_id, "routing")
    assert set(ds.dims) == set(("x", "y"))
    assert len(ds.coords) == 2
    # The PFT values are on a 360 x 720 grid,
    # so we interpolate the topography onto this grid.
    lat_vals = numpy.arange(0, 180, 0.5)
    lon_vals = numpy.arange(0, 360, 0.5)
    ds = ds.interp({"y": lat_vals, "x": lon_vals})
    topo = ds.topo.values
    ds = generate_pft_netcdf(topo, latitudes, pft_values)
    with app.app_context():
        save_revision(_id, ds, "pft")
def revision_comparison(_id, file_type):
    """Render a map of the difference between the two latest revisions.

    Redirects back to the steps page when fewer than two revisions exist.
    """
    try:
        newest = load_file(_id, file_type, -1)
        older = load_file(_id, file_type, -2)
    except IndexError:
        flash("Not enough revisions")
        return redirect(url_for("app.steps", _id=_id))
    diff_ds = newest - older
    lon_name, lat_name = get_lon_lat_names(_id)
    hv_obj = diff_ds.hvplot(x=lon_name, y=lat_name).opts(
        responsive=True, cmap="terrain")
    bokeh_fig = hv.render(hv_obj, backend="bokeh")
    bokeh_fig.sizing_mode = "scale_width"
    script, div = components(bokeh_fig)
    return render_template(
        "app/map.html", script=script, div=div, data_file_id=_id)
def file_info(_id, file_type):
    """Render the xarray HTML repr of a stored file plus extra metadata."""
    ds = load_file(_id, file_type)
    # Un-hide the xarray repr wrapper so it displays expanded.
    raw_html = ds._repr_html_()
    dataset_info = raw_html.replace(
        "<div class='xr-wrap' hidden>", "<div class='xr-wrap'>")
    extra_info = get_info(_id, file_type)
    return render_template(
        "app/file_info.html",
        file_info=dataset_info,
        extra_info=extra_info,
    )
def ahmcoef(body):
    """Build the AHM coefficient dataset from the bathymetry and save it."""
    flask_app = create_app()
    data_file_id = body["id"]
    with flask_app.app_context():
        bathy_ds = load_file(data_file_id, "bathy")
    result_ds = create_ahmcoef(bathy_ds)
    with flask_app.app_context():
        save_revision(data_file_id, result_ds, "ahmcoef")
def heatflow(body):
    """Build the heatflow dataset from the bathymetry and save it."""
    flask_app = create_app()
    data_file_id = body["id"]
    with flask_app.app_context():
        bathy_ds = load_file(data_file_id, "bathy")
    result_ds = create_heatflow(bathy_ds)
    with flask_app.app_context():
        save_revision(data_file_id, result_ds, "heatflow")
def map(_id):
    """Render an hvplot/bokeh map of the file's default revision.

    NOTE(review): the name shadows the builtin ``map``; it is kept because
    the function name is part of the route's external interface.
    """
    dataset = load_file(_id)
    lon_name, lat_name = get_lon_lat_names(_id)
    hv_obj = dataset.hvplot(x=lon_name, y=lat_name).opts(
        responsive=True, cmap="terrain")
    bokeh_fig = hv.render(hv_obj, backend="bokeh")
    bokeh_fig.sizing_mode = "scale_width"
    script, div = components(bokeh_fig)
    return render_template(
        "app/map.html", script=script, div=div, data_file_id=_id)
def steps(_id):
    """Render the processing-steps page for a data file."""
    data_file_name = get_filename(_id)
    ds = load_file(_id, file_type="raw", revision=0)
    # Un-hide the xarray repr and collapse all its sections.
    file_info = (
        ds._repr_html_()
        .replace("<div class='xr-wrap' hidden>", "<div class='xr-wrap'>")
        .replace("type='checkbox' checked", "type='checkbox'")
    )
    return render_template(
        "app/steps.html",
        file_info=file_info,
        data_file_name=data_file_name,
        _id=_id,
    )
def regrid(body):
    """Interpolate the raw file onto a regular lon/lat grid and save it.

    ``body`` supplies the grid steps, the limit mode ("default" for the
    full globe or "data" for the data's own extent), the interpolator
    name and the data file id.
    """
    app = create_app()
    limits = body["limits"]
    lon_step = float(body["Longitude Step"])
    lat_step = float(body["Latitude Step"])
    interpolator = body["interpolator"]
    _id = body["id"]
    # Load file
    with app.app_context():
        ds = load_file(_id, "raw")
        lon, lat = get_lon_lat_names(_id)
    # Extremities
    new_values = []
    # Limits: full-globe half-extents for lon and lat respectively.
    default_limits = [180, 90]
    for coord, step, default_limit in zip([lon, lat],
                                          [lon_step, lat_step],
                                          default_limits):
        if limits == "default":
            lower = -default_limit
            upper = default_limit
        elif limits == "data":
            # Extend the data range by half the spacing at each edge so the
            # new cell centers cover the original cells.
            sorted_vals = numpy.sort(numpy.unique(ds[coord]))
            lower = ds[coord].min() - (sorted_vals[1] - sorted_vals[0]) / 2.0
            upper = ds[coord].max() + (sorted_vals[-1] - sorted_vals[-2]) / 2.0
        else:
            raise AttributeError("Unknown data type passed from UI")
        # Cell centers start half a step inside the lower bound.
        min_val = lower + step / 2.0
        # NOTE(review): the exclusive arange stop of `upper + step / 2.0`
        # makes the last center land on `upper - step / 2.0` in exact
        # arithmetic, but float rounding could add one extra point — confirm
        # whether `upper` was intended here.
        max_val = upper + step / 2.0
        # TODO maybe we should use numpy.linspace here?
        new_values.append(numpy.arange(min_val, max_val, step))
    # Interpolate data file
    interp_options = {
        lon: new_values[0],
        lat: new_values[1],
    }
    # fill_value=None lets scipy extrapolate beyond the original grid.
    ds = ds.interp(interp_options, method=interpolator,
                   kwargs=dict(fill_value=None))
    print(f" [x] {datetime.now()} interpolation finished", flush=True)
    # Save file
    with app.app_context():
        save_revision(_id, ds, "raw")
def view_database_file(_id, file_type):
    """Render a bokeh image plot of a stored file with a variable selector.

    All data variables are shipped to the client; a JS callback swaps
    which one is drawn.
    """
    ds = load_file(_id, file_type)
    # One column per variable, each wrapped in a list for the image glyph.
    plot_data = {name: [ds[name].values] for name in ds.data_vars}
    first_var = list(ds.data_vars)[0]
    plot_data["to_plot"] = [ds[first_var].values]
    source = ColumnDataSource(plot_data)
    callback = CustomJS(
        args=dict(source=source),
        code="""
    var data = source.data;
    data['to_plot'] = data[cb_obj.value];
    source.change.emit();
    """,
    )
    select = Select(title="Variable:", options=list(ds.data_vars))
    select.js_on_change("value", callback)
    fig = Figure(
        x_range=(-180, 180),
        y_range=(-90, 90),
        aspect_ratio=2.5,
        tools="pan,wheel_zoom,box_zoom,reset, hover",
    )
    fig.sizing_mode = "scale_width"
    fig.image(
        image="to_plot",
        x=-180,
        y=-90,
        dw=360,
        dh=180,
        source=source,
        palette="Viridis11",
    )
    layout = column(column(select), fig, sizing_mode="stretch_width")
    script, div = components(layout)
    return render_template(
        "app/bokeh_plot.html",
        script=script,
        div=div,
        data_file_id=_id,
        title=file_type.capitalize(),
    )
def routing(_id):
    """Routing step view: validate the form and dispatch a routing job.

    GET renders the form; POST validates the chosen topography variable,
    optionally uploads a custom orca file, and sends the routing message
    to the processing engine.
    """
    ds = load_file(_id, "raw")
    variable_names = list(ds.data_vars)
    # NOTE(review): lon/lat are looked up but never used in this view;
    # the call is kept in case get_lon_lat_names has side effects — verify.
    lon, lat = get_lon_lat_names(_id)
    if request.method == "POST":
        topo_variable = request.form["topo_var"]
        error = ""
        if not len(topo_variable):
            error += "Topography Variable not understood; "
        elif topo_variable not in variable_names:
            error += "Topography Variable not in data set"
        if request.form["orcafile"] == "custom":
            file = _validate_file(request)
            upload_file(file, data_file_id=_id, file_type="paleorca")
        if not len(error):
            body = {"id": _id, **request.form}
            send_preprocessing_message("routing", body)
            # FIX: corrected user-facing typo "succesfully".
            flash("Routing successfully sent to engine")
            return redirect(url_for("app.steps", _id=_id))
        flash(error)
    data_shape = tuple(ds.dims.values())
    # Offer regridding when the data is not already on the 180x360 grid.
    show_regrid = data_shape != (180, 360)
    return render_template(
        "app/routing.html",
        _id=_id,
        variable_names=variable_names,
        show_regrid=show_regrid,
        data_shape=data_shape,
    )
def _load_ds(self, _id):
    """Load the dataset for ``_id``, promote lat/lon variables to
    coordinates, and set up ij indexing for curvilinear grids.

    Sets ``self.ds``, ``self._original_ds``, ``self._lat_lon_ori``,
    ``self.curvilinear_coordinates`` and the ``self.attribute`` widget.

    Returns:
        True on success.

    Raises:
        ValueError: if coordinate variables are neither 1-D nor 2-D.
    """
    self.loaded = False
    with self.app.app_context():
        dataset = load_file(_id, self.file_type)
    # Any data variable whose name mentions lat or lon becomes a coordinate.
    latlon_vars = {
        name: name
        for name in dataset.data_vars
        if "lat" in name.lower() or "lon" in name.lower()
    }
    dataset = dataset.set_coords(latlon_vars)
    self._lat_lon_ori = latlon_vars
    self.curvilinear_coordinates = None
    first_coord = list(dataset.coords.variables.values())[0]
    ndims = len(first_coord.dims)
    if ndims == 2:
        # Curvilinear coordinates: remember the true coords for export and
        # index by the underlying dimensions instead (ij indexing).
        dim_names = list(dataset[list(dataset.coords)[0]].dims)
        self.curvilinear_coordinates = list(dataset.coords)
        dataset.coords[dim_names[0]] = dataset[dim_names[0]]
        dataset.coords[dim_names[1]] = dataset[dim_names[1]]
        dataset = dataset.reset_coords()
    elif ndims != 1:
        raise ValueError("Unknown number of Coordinates")
    # ndims == 1 is a standard grid: nothing more to do.
    self.ds = dataset
    var_names = list(dataset.keys())
    self.attribute.options = var_names
    self.attribute.value = var_names[0]
    self._original_ds = dataset.copy(deep=True)
    self.loaded = True
    return True