def land_surface_ids():
    context = get_request_context()
    ls_data, _ = load_ref_data("land_surface_table", context)
    ls_ids = ls_data["surface_id"].to_list()
    yield ls_ids
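# A minimal usage sketch, assuming the generator above is registered as a
# pytest fixture (e.g. with a `@pytest.fixture` decorator, which is not shown
# in this excerpt). The consuming test below is illustrative only and is not
# part of the original module.
#
#     import pytest
#
#     @pytest.fixture
#     def land_surface_ids():
#         ...  # body as defined above
#
#     def test_land_surface_ids_are_unique(land_surface_ids):
#         assert len(land_surface_ids) == len(set(land_surface_ids))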
def test_land_surface_loading(
    land_surface_loading_response_dicts, details, n_rows, n_nodes
):
    key = n_rows, n_nodes
    land_surfaces = land_surface_loading_response_dicts[key]

    result = land_surface_loading(
        land_surfaces, details, context=get_request_context()
    )

    # the summary is an aggregation, so it never has more rows than the input
    assert result.get("summary") is not None
    assert len(result.get("summary")) <= len(land_surfaces["land_surfaces"])

    if details:
        # details are reported one row per input land surface
        assert result.get("details") is not None
        assert len(result.get("details")) == len(land_surfaces["land_surfaces"])
def get_valid_context(state: str = "state", region: str = "region") -> Dict[str, Any]:
    context = utils.get_request_context(state, region)

    # fail fast if the context is invalid in the current process
    isvalid, msg = utils.validate_request_context(context)
    if not isvalid:
        raise HTTPException(status_code=400, detail=msg)

    # unless foreground execution is forced, re-run the validation in a
    # celery worker as well
    if not config.NEREID_FORCE_FOREGROUND:  # pragma: no branch
        task = bg.background_validate_request_context.s(context=context).apply_async()
        isvalid, msg = task.get()
        if not isvalid:  # pragma: no cover
            raise HTTPException(
                status_code=400, detail="Error in celery worker: " + msg
            )

    return context
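# A minimal sketch of wiring the helper above into a FastAPI route via
# dependency injection; the router, path, and response payload here are
# illustrative assumptions and are not taken from the original module.
#
#     from typing import Any, Dict
#     from fastapi import APIRouter, Depends
#
#     router = APIRouter()
#
#     @router.get("/config/validate")
#     async def validate_config(
#         context: Dict[str, Any] = Depends(get_valid_context)
#     ):
#         # get_valid_context raises HTTPException(400) for invalid contexts,
#         # so reaching this body means validation succeeded.
#         return {"status": "SUCCESS", "data_path": context.get("data_path")}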
def test_land_surface_loading_with_err(
    land_surface_loading_response_dicts, details, n_rows, n_nodes
):
    key = n_rows, n_nodes
    land_surfaces = land_surface_loading_response_dicts[key]

    # corrupt one record so the loader is forced to report an error
    land_surfaces["land_surfaces"][5]["surface_key"] = r"¯\_(ツ)_/¯"

    result = land_surface_loading(
        land_surfaces, details, context=get_request_context()
    )

    assert "ERROR" in result.get("errors", [])[0]
    assert result.get("summary") is not None
    assert len(result.get("summary")) <= len(land_surfaces["land_surfaces"])

    if details:
        assert result.get("details") is not None
        assert len(result.get("details")) == len(land_surfaces["land_surfaces"])
def test_get_request_context(state, region, dirname, context, exp):
    req_context = utils.get_request_context(state, region, dirname, context)
    assert all(k in req_context for k in exp)
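# A hedged sketch of how the arguments above might be supplied via
# pytest.mark.parametrize; the concrete parameter values are illustrative
# assumptions, not the project's actual test matrix.
#
#     @pytest.mark.parametrize(
#         "state, region, dirname, context, exp",
#         [
#             # hypothetical case: defaults with no dirname or context override
#             ("state", "region", None, None, {"data_path": "..."}),
#         ],
#     )
#     def test_get_request_context(state, region, dirname, context, exp):
#         ...  # body as defined above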
def contexts():
    cx1 = get_request_context()

    cx2 = deepcopy(cx1)
    cx2.pop("data_path")

    cx3 = deepcopy(cx1)
    cx3["data_path"] = r"¯\_(ツ)_/¯"

    cx4 = deepcopy(cx1)
    cx4.pop("project_reference_data")

    cx5 = deepcopy(cx1)
    cx5["project_reference_data"]["land_surface_table"].pop("file")

    cx6 = deepcopy(cx1)
    cx6["project_reference_data"]["land_surface_table"]["file"] = r"¯\_(ツ)_/¯"

    cx7 = deepcopy(cx1)
    cx7["project_reference_data"]["land_surface_table"] = [r"¯\_(ツ)_/¯"]

    cx8 = deepcopy(cx1)
    cx8["project_reference_data"]["land_surface_table"].pop("preprocess")

    cx9 = deepcopy(cx1)
    cx9["project_reference_data"]["land_surface_emc_table"]["file"] = r"¯\_(ツ)_/¯"

    cx10 = deepcopy(cx1)
    cx10["project_reference_data"]["land_surface_emc_table"].pop("parameters")

    cx11 = deepcopy(cx1)
    del cx11["api_recognize"]["land_surfaces"]["preprocess"][0]  # no joins

    cx12 = deepcopy(cx1)
    del cx12["api_recognize"]["land_surfaces"]["preprocess"][1]  # no remaps

    cx13 = deepcopy(cx11)
    del cx13["api_recognize"]["land_surfaces"]["preprocess"][0]  # no joins or remaps

    cx14 = deepcopy(cx11)
    cx14["api_recognize"]["land_surfaces"]["preprocess"].insert(
        0,
        {
            "joins": [
                {
                    "other": "land_surface_table",
                    "how": "left",
                    "left_on": "surface_key",
                    "right_on": "surface_id",
                }
            ]
        },
    )

    cx15 = deepcopy(cx1)
    del cx15["api_recognize"]["treatment_facility"]["preprocess"][0]  # no joins

    cx16 = deepcopy(cx1)
    cx16["api_recognize"]["land_surfaces"]["preprocess"][0]["joins"] = [
        {
            "other": r"¯\_(ツ)_/¯",
            "how": "left",
            "left_on": "surface_key",
            "right_on": "surface_id",
        }
    ]

    cx17 = deepcopy(cx1)
    cx17["api_recognize"]["land_surfaces"]["preprocess"][1]["remaps"] = [
        {
            "left": r"¯\_(ツ)_/¯",
            "right": "imp_pct",
            "how": "addend",
            "mapping": {
                "COMM": 10,
                "RESMF": 15,
                "RESSFH": 20,
                "TRANS": 30,
                "WATER": 100,
            },
        }
    ]

    cx18 = deepcopy(cx1)
    cx18["api_recognize"]["land_surfaces"]["preprocess"][1]["remaps"] = [
        {
            "left": "land_use",
            "right": "imp_pct",
            "how": r"¯\_(ツ)_/¯",
            "mapping": {
                "COMM": 10,
                "RESMF": 15,
                "RESSFH": 20,
                "TRANS": 30,
                "WATER": 100,
            },
        }
    ]

    cx19 = deepcopy(cx1)
    cx19["api_recognize"]["land_surfaces"]["preprocess"][1]["remaps"] = [
        {
            "left": "land_use",
            "right": r"¯\_(ツ)_/¯",
            "how": "addend",
            "mapping": {
                "COMM": 10,
                "RESMF": 15,
                "RESSFH": 20,
                "TRANS": 30,
                "WATER": 100,
            },
        }
    ]

    cx20 = deepcopy(cx1)
    cx20["project_reference_data"]["land_surface_table"]["preprocess"][0][
        "expand_fields"
    ] = [{"field": r"¯\_(ツ)_/¯", "sep": "-", "new_column_names": [1, 2, 3]}]

    cx21 = deepcopy(cx1)
    cx21["project_reference_data"]["dry_weather_flow_table"]["seasons"] = {
        "summer": None
    }

    cx22 = deepcopy(cx1)
    cx22["project_reference_data"]["dry_weather_flow_table"]["seasons"] = {
        r"¯\_(ツ)_/¯": ["these", "are", "months"]
    }

    keys = [  # these are easier to copy into tests
        "default",
        "default_no_data_path_invalid",
        "default_dne_data_path_invalid",
        "default_no_ref_data_invalid",
        "default_no_lst_file_invalid",
        "default_lst_file_dne_invalid",
        "default_lst_not_dict_invalid",
        "default_lst_no_expanded_fields_valid",
        "default_emc_file_dne_invalid",
        "default_emc_no_params_valid",
        "default_api_no_ls_joins_valid",
        "default_api_no_ls_remaps_valid",
        "default_api_no_ls_joins_or_remaps_valid",
        "default_api_ls_joins_no_merge_no_params_valid",
        "default_api_no_tf_joins_valid",
        "default_api_ls_joins_other_dne_valid",
        "default_api_ls_remap_left_dne_valid",
        "default_api_ls_remap_how_dne_valid",
        "default_api_ls_remap_right_dne_valid",
        "default_lst_expand_field_dne_valid",
        "default_dw_flow_null_months_valid",
        "default_dw_flow_unknown_season_valid",
    ]

    values = [
        cx1, cx2, cx3, cx4, cx5, cx6, cx7, cx8, cx9, cx10, cx11, cx12,
        cx13, cx14, cx15, cx16, cx17, cx18, cx19, cx20, cx21, cx22,
    ]

    return {k: v for k, v in zip(keys, values)}
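# A hedged sketch of one way the mapping returned above could drive a
# validation test. The expectation that keys ending in "_valid" (plus
# "default") validate successfully is read off the key names; the test itself
# is an illustrative assumption, not part of the original suite.
#
#     def test_validate_request_contexts(contexts):
#         for key, context in contexts.items():
#             isvalid, _ = utils.validate_request_context(context)
#             expected = key == "default" or key.endswith("_valid")
#             assert isvalid == expected, key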