def pft(body):
    """Build the PFT (plant functional type) netCDF for a data file.

    Expects ``body`` to carry the file ``id`` and a JSON-encoded ``data``
    payload whose ``dataArray`` rows are: first column = cutoff latitude,
    next 13 columns = PFT type values.
    """
    app = create_app()
    _id = body["id"]
    data = json.loads(body["data"])
    resp_array = numpy.array(data["dataArray"])

    # Make sure the data array is in the expected format:
    # first col = cutoff latitudes, next 13 cols are pft types.
    # BUG FIX: original read `assert len(resp_array[0] == 14)`, which takes
    # the len() of a boolean comparison array and therefore passes for any
    # non-empty row; the intent is to check the column count.
    assert len(resp_array[0]) == 14
    pft_values = resp_array[:, 1:]
    latitudes = resp_array[:, 0]
    # Make sure 90 is the last value
    assert latitudes[-1] == 90

    # Load routing file with final topography
    with app.app_context():
        ds = load_file(_id, "routing")
    assert set(ds.dims) == set(("x", "y"))
    assert len(ds.coords) == 2

    # The PFT values are on a 360 x 720 grid,
    # so we need to interpolate the values onto this grid.
    lat_vals = numpy.arange(0, 180, 0.5)
    lon_vals = numpy.arange(0, 360, 0.5)
    ds = ds.interp({"y": lat_vals, "x": lon_vals})

    topo = ds.topo.values
    ds = generate_pft_netcdf(topo, latitudes, pft_values)
    with app.app_context():
        save_revision(_id, ds, "pft")
def routing(body):
    """Run the routing routines for a file and persist the resulting datasets."""
    app = create_app()
    _id = body["id"]
    topo_variable = body["topo_var"]

    # Which revisions to persist; "next_step_only" skips only the bathy output.
    save_flags = {
        "routing": True,
        "bathy": not body.get("next_step_only", False),
        "soils": True,
        "topo_high_res": True,
    }

    # Load the raw data, the paleorca grid and the coordinate names.
    with app.app_context():
        ds = load_file(_id, "raw")
        ds_orca = load_file(_id, "paleorca")
        lon, lat = get_lon_lat_names(_id)

    latitudes = ds[lat].values
    topography = ds[topo_variable].values

    # run_routines returns (routing, bathy, soils, topo_high_res) datasets,
    # matching save_flags insertion order.
    outputs = run_routines(topography, latitudes, ds_orca)

    with app.app_context():
        for (step_name, keep), out_ds in zip(save_flags.items(), outputs):
            if keep:
                save_revision(_id, out_ds, step_name)
def invalidate(body):
    """Mark each already-seen task in ``body["tasks"]`` as invalidated."""
    app = create_app()
    file_id = body["id"]
    with app.app_context():
        for task in body["tasks"]:
            # Only steps that have actually run can be invalidated.
            if not step_seen(file_id, task):
                continue
            invalidate_step(file_id, task)
def ahmcoef(body):
    """Derive the AHMCOEF dataset from the bathymetry and store it."""
    app = create_app()
    file_id = body["id"]
    with app.app_context():
        bathy_ds = load_file(file_id, "bathy")
    result = create_ahmcoef(bathy_ds)
    with app.app_context():
        save_revision(file_id, result, "ahmcoef")
def heatflow(body):
    """Derive the heatflow dataset from the bathymetry and store it."""
    app = create_app()
    file_id = body["id"]
    with app.app_context():
        bathy_ds = load_file(file_id, "bathy")
    result = create_heatflow(bathy_ds)
    with app.app_context():
        save_revision(file_id, result, "heatflow")
def regrid(body):
    """Interpolate the raw dataset onto a regular cell-centred lon/lat grid.

    Grid extents come either from fixed defaults (±180 lon, ±90 lat) or
    from the data's own coordinate range, per ``body["limits"]``.
    """
    app = create_app()
    file_id = body["id"]
    limits = body["limits"]
    interpolator = body["interpolator"]
    axis_steps = (float(body["Longitude Step"]), float(body["Latitude Step"]))

    # Load file
    with app.app_context():
        ds = load_file(file_id, "raw")
        lon, lat = get_lon_lat_names(file_id)

    interp_options = {}
    for coord, step, default_limit in zip((lon, lat), axis_steps, (180, 90)):
        if limits == "default":
            lower, upper = -default_limit, default_limit
        elif limits == "data":
            # Pad the data extent by half the spacing at each end.
            sorted_vals = numpy.sort(numpy.unique(ds[coord]))
            half_first = (sorted_vals[1] - sorted_vals[0]) / 2.0
            half_last = (sorted_vals[-1] - sorted_vals[-2]) / 2.0
            lower = ds[coord].min() - half_first
            upper = ds[coord].max() + half_last
        else:
            raise AttributeError("Unknown data type passed from UI")
        # Cell-centred axis: first point sits half a step inside the lower
        # bound; the stop value keeps arange's exclusive end consistent.
        # TODO maybe we should use numpy.linspace here?
        interp_options[coord] = numpy.arange(lower + step / 2.0,
                                             upper + step / 2.0, step)

    # Interpolate data file
    ds = ds.interp(interp_options,
                   method=interpolator,
                   kwargs=dict(fill_value=None))
    print(f" [x] {datetime.now()} interpolation finished", flush=True)

    # Save file
    with app.app_context():
        save_revision(file_id, ds, "raw")
def app():
    """Yield a Flask app wired to a throw-away, pre-seeded SQLite database."""
    db_fd, db_path = tempfile.mkstemp()
    test_app = create_app({
        "TESTING": True,
        "DATABASE": db_path,
        "AUTH": "basic",
    })
    with test_app.app_context():
        init_db()
        get_db().executescript(_data_sql)

    yield test_app

    # Teardown: release and remove the temporary database file.
    os.close(db_fd)
    os.unlink(db_path)
def run_function(method, body):
    """Decode a queued message and execute the preprocessing step it names.

    The step name is the middle segment of the AMQP routing key; ``body``
    is the raw message bytes (JSON). Steps other than ``invalidate`` are
    recorded as stale before running and up-to-date after.
    """
    app = create_app()
    routing_key = method.routing_key
    print(
        f" [x] {datetime.now()} Received message from {routing_key} with body: {body.decode()}",
        flush=True,
    )
    func = routing_key.split(".")[1]
    body = json.loads(body.decode())
    # func_params returns None when the step should not be (re)run.
    params = func_params(func, body)
    if params is not None:
        _id = body["id"]
        if func != "invalidate":
            with app.app_context():
                save_step(_id, func, params, up_to_date=False)
        # SECURITY/robustness fix: dispatch by attribute lookup instead of
        # eval(f"steps.{func}({params})") — eval re-parsed repr(params) as
        # source code, which breaks for non-literal values and executes
        # arbitrary expressions taken from message data.
        getattr(steps, func)(params)
        if func != "invalidate":
            with app.app_context():
                save_step(_id, func, params, up_to_date=True)
def func_params(func, body):
    """Decide whether step ``func`` should run and with which parameters.

    Returns the parameter dict to run with, or None when the step should
    be skipped.
    """
    # If invalidated isn't in keys then this is a "root" call meaning it should be run
    if "invalidated" not in body.keys():
        return body
    # If 'invalidated': 'y(es)' in the body then this means the step has been invalidated
    # It should be rerun IF it has already been run before OR has no params
    # We will rerun it with the same parameters
    if "invalidated" in body.keys() and body["invalidated"].lower() in [
            "yes", "y"
    ]:
        if "has_params" in body.keys() and body["has_params"].lower() in [
                "no", "n"
        ]:
            # Parameter-less step: rerun with the message body itself.
            return body
        # NOTE(review): nesting reconstructed — the stored-parameter lookup
        # is taken to apply only when invalidated == yes; confirm against VCS.
        app = create_app()
        with app.app_context():
            # Rerun with the previously stored parameters, if any.
            if step_seen(body["id"], func):
                return step_parameters(body["id"], func)
    # Not invalidated (or never run before): nothing to do.
    return None
def calculate_weights(body):
    """Run the Mosaic weight calculation for a file and store the tarball.

    Generates coupling weights from the bathy / weight_coords / sub_basins
    files, tars the IGCM and DOMSK output directories into the upload
    folder, records the archive in the database, then cleans up.
    """
    print(f"{datetime.now()} Calculating weights", flush=True)
    app = create_app()
    _id = body["id"]

    # Resolve the input file paths inside an app context.
    with app.app_context():
        bathy_file = get_file_path(_id, "bathy", full=True)
        coords_file = get_file_path(_id, "weight_coords", full=True)
        subbasins_file = get_file_path(_id, "sub_basins", full=True)

    print(f"{datetime.now()} Running Mosaic Runner", flush=True)
    runner = mosaic.MosaicRunner(root="/tmp",
                                 cpl_dir="/usr/src",
                                 user_name=str(uuid.uuid4()))
    runner.run(bathy_file=bathy_file,
               coords_file=coords_file,
               subbasins_file=subbasins_file)

    # FIX: tempfile._get_candidate_names() is a private CPython API with no
    # stability guarantee; a uuid hex string gives the same "random unique
    # name" behaviour with a public API.
    temp_name = uuid.uuid4().hex + ".tar.gz"
    temp_path = os.path.join(app.config["UPLOAD_FOLDER"], temp_name)

    # Tar files directly into directory.
    print(f"{datetime.now()} Compressing files to tar", flush=True)
    work_dir = os.path.join("/", "home", runner.user_name)
    # FIX: use subprocess.run with check=True — Popen(...).wait() silently
    # ignored a failing tar, letting a missing/corrupt archive reach the db.
    subprocess.run(
        ["tar", "-cvzf", temp_path, "IGCM", "DOMSK"],
        cwd=work_dir,
        check=True,
    )

    # Add file to db
    print(f"{datetime.now()} Saving new db file {temp_name} to database",
          flush=True)
    with app.app_context():
        save_file_to_db(_id, temp_name, "weights")

    # Delete Folder (flush added for consistency with the other prints).
    print(f"{datetime.now()} Cleaning up", flush=True)
    shutil.rmtree(os.path.join(work_dir, "IGCM"))
    shutil.rmtree(os.path.join(work_dir, "DOMSK"))
    runner.cleanup()
def test_config():
    """The factory honours TESTING: off by default, on when passed in."""
    default_app = create_app()
    assert not default_app.testing

    testing_app = create_app({"TESTING": True})
    assert testing_app.testing
def main():
    """Consume preprocessing tasks from RabbitMQ and dispatch them to steps.

    Binds a durable queue to the ``preprocessing`` topic exchange for
    ``preprocessing.*.python`` keys, runs the named step for each message,
    publishes a matching ``...done`` message, and acks afterwards.
    """
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host=os.environ["BROKER_HOSTNAME"]))
    app = create_app()
    channel = connection.channel()
    channel.exchange_declare(exchange="preprocessing", exchange_type="topic")
    channel.queue_declare(queue="preprocessing_python_task_queue", durable=True)
    channel.queue_bind(
        exchange="preprocessing",
        queue="preprocessing_python_task_queue",
        routing_key="preprocessing.*.python",
    )

    def callback(ch, method, properties, body):
        # One message = one step execution; the step name is the middle
        # segment of the routing key.
        routing_key = method.routing_key
        print(
            f" [x] {datetime.now()} Received message from {routing_key} with body: {body.decode()}",
            flush=True,
        )
        func = routing_key.split(".")[1]
        body = json.loads(body.decode())
        params = func_params(func, body)
        print(f"{datetime.now()} Params: {params}", flush=True)
        if params is not None:
            _id = body["id"]
            if func != "invalidate":
                with app.app_context():
                    save_step(_id, func, params, up_to_date=False)
            # SECURITY/robustness fix: dispatch by attribute lookup instead
            # of eval(f"steps.{func}({params})") — eval re-parsed
            # repr(params) as source and executed message-derived text.
            getattr(steps, func)(params)
            if func != "invalidate":
                with app.app_context():
                    save_step(_id, func, params, up_to_date=True)
        # Always announce completion so downstream steps can decide for
        # themselves (via func_params) whether they need to rerun.
        routing_key_done = ".".join([*routing_key.split(".")[:2], "done"])
        channel.basic_publish(
            exchange="preprocessing",
            routing_key=routing_key_done,
            body=json.dumps(body),
            properties=pika.BasicProperties(
                delivery_mode=2,  # make message persistent
            ),
        )
        print(
            " [x] {} Sent message to {} {}".format(datetime.now(),
                                                   routing_key_done, body),
            flush=True,
        )
        print(f" [x] {datetime.now()} Done", flush=True)
        ch.basic_ack(delivery_tag=method.delivery_tag)

    # Process one message at a time per worker.
    channel.basic_qos(prefetch_count=1)
    channel.basic_consume(queue="preprocessing_python_task_queue",
                          on_message_callback=callback)
    print(f" [*] {datetime.now()} Waiting for messages. To exit press CTRL+C",
          flush=True)
    channel.start_consuming()
def __init__(self, **params):
    """Build the editor's Panel widgets, wire callbacks, and load the file.

    Reads ``id`` (required) and ``file_type``/``redirect`` (optional) from
    the current Bokeh session's request arguments, then loads the dataset
    and populates the options pane.
    """
    # How we are going to modify the values
    # Absolute => Set to that value
    # Relative => Base value + new value
    # Percentage => Base value + percentage
    self.calculation_type = pn.widgets.RadioButtonGroup(
        options=["Absolute", "Relative", "Percentage"], align="end")

    # Replacement value
    self.spinner = pn.widgets.IntInput(name="Replacement Value",
                                       value=0,
                                       align="start")

    # Whether to select Land or Ocean
    self.land = pn.widgets.Checkbox(name="Select Land",
                                    max_width=100,
                                    value=True)
    self.ocean = pn.widgets.Checkbox(name="Select Ocean",
                                     max_width=100,
                                     value=True)

    # Buttons
    self.apply = pn.widgets.Button(name="\u2713 Apply",
                                   align="end",
                                   button_type="primary")
    self.undo_button = pn.widgets.Button(name="\u21B6 Undo",
                                         align="end",
                                         button_type="warning")
    self.redo_button = pn.widgets.Button(name="Redo \u21B7",
                                         align="end",
                                         button_type="warning")
    self.apply_previous_changes = pn.widgets.Button(
        name="\u2713 Apply Previous Changes",
        align="end",
        button_type="primary")
    self.save_button = pn.widgets.Button(name="Save",
                                         align="end",
                                         button_type="success")

    # Description
    self.description = pn.widgets.input.TextAreaInput(
        name="Description: ", placeholder="Notable changes ...")

    # Mask
    self.mask = pn.widgets.Checkbox(name="Mask", max_width=100)
    self.mask_value = pn.widgets.IntInput(name="Mask Value", value=0)

    # Store the variable we want to look at and modify
    self.attribute = pn.widgets.Select(name="Variable",
                                       max_width=200,
                                       align="end")

    # Choose colormap
    self.colormap = pn.widgets.Select(
        name="Colormap",
        options=colormaps,
        value="Oleron",
        max_width=200,
        align="start",
    )
    self.colormap_min = pn.widgets.FloatInput(name="Min Value", width=100)
    self.colormap_max = pn.widgets.FloatInput(name="Max Value",
                                              width=100,
                                              align="end")
    self.colormap_range_slider = pn.widgets.RangeSlider(width=400,
                                                        show_value=False)
    self.colormap_delta = pn.widgets.FloatInput(
        name="Delta between values", value=0, align="end")

    # Link the viewing of multiple graphs together
    self.selection = link_selections.instance(unselected_alpha=0.4)

    # Parts of the display
    self.file_pane = pn.Column()
    self.graph_pane = pn.Column()
    self.options_pane = pn.Column()

    # Set Parameterized defaults before super().__init__ consumes **params.
    self.param.ds.default = xr.Dataset()
    self.param.loaded.default = False
    super().__init__(**params)

    # Wire the button callbacks.
    self.apply.on_click(self._apply_values)
    self.apply_previous_changes.on_click(self._apply_previous_changes)
    self.undo_button.on_click(self.undo)
    self.redo_button.on_click(self.redo)
    self.save_button.on_click(self.save)
    # Client-side: after saving, send the browser to the URL supplied in
    # the "redirect" query argument.
    self.save_button.js_on_click(
        args={
            "target": pn.state.curdoc.session_context.request.arguments["redirect"]
            [0].decode()
        },
        code="window.top.location.href = target",
    )

    # Internal state: colormap auto-ranging, coordinate system, undo/redo.
    self._auto_update_cmap_min = True
    self._auto_update_cmap_max = True
    self.curvilinear_coordinates = None
    self._undo_list = []
    self._redo_list = []

    # Keep min/max inputs and the range slider in sync.
    self.colormap_min.param.watch(self._colormap_callback, "value")
    self.colormap_max.param.watch(self._colormap_callback, "value")
    self.colormap_range_slider.param.watch(self._colormap_callback, "value")

    self.app = create_app()

    # "file_type" is optional in the query string.
    try:
        self.file_type = pn.state.curdoc.session_context.request.arguments[
            "file_type"][0].decode()
    except KeyError:
        pass

    # "id" is required: which data file to edit.
    self.data_file_id = int(
        pn.state.curdoc.session_context.request.arguments["id"][0])
    self._load_ds(self.data_file_id)
    self._options_pane_setup()