Example 1
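    # Handler that rebuilds a conversion configuration from the request inputs,
    # runs convert() on it, and returns the generated parameters file.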
    def _handler(self, request, response):
        loglevel, uhs_files, station_file, domain, config_file = collect_args_wrapper(
            request, self.workdir)

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        log_handler(
            self,
            response,
            "Rebuilding configuration",
            logger,
            log_level=loglevel,
            process_step="config_rebuild",
        )
        config_file = self.edit_config_file(config_file, uhs_files,
                                            station_file, domain)

        log_handler(
            self,
            response,
            "Run Parameter Conversion",
            logger,
            log_level=loglevel,
            process_step="process",
        )
        try:
            convert(config_file)
        except Exception as e:
            raise ProcessError(f"{type(e).__name__}: {e}")

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        config = read_config(config_file)
        response.outputs["output"].file = get_outfile(config, "params")

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
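Example 2
    # Handler that opens each input NetCDF as a CFDataset, splits merged
    # climatology files apart, and returns the outputs as a metalink document.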
    def _handler(self, request, response):
        loglevel = request.inputs["loglevel"][0].data
        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        filepaths = get_filepaths(request.inputs["netcdf"])
        log_handler(
            self,
            response,
            f"Spliting climo files: {filepaths}",
            logger,
            log_level=loglevel,
            process_step="process",
        )
        output_filepaths = []
        for path in filepaths:
            try:
                input_file = CFDataset(path)
            except Exception:
                raise ProcessError(
                    "The input for netcdf file paths could not be converted to a netcdf dataset"
                )
            else:
                output_filepaths.extend(split_merged_climos(input_file, self.workdir))

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["output"].data = build_meta_link(
            varname="split_climo",
            desc="Split climatologies",
            outfiles=output_filepaths,
            outdir=self.workdir,
        )

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
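Example 3
    # Handler that copies the input file, applies the requested metadata
    # updates to it in place, and returns the modified file.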
    def _handler(self, request, response):
        loglevel = request.inputs["loglevel"][0].data
        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        updates_instruction = self.updates_instruction_generator(request)

        filepath = self.copy_and_get_filepath(request)
        with CFDataset(filepath, mode="r+") as dataset:
            log_handler(
                self,
                response,
                f"Updating {filepath} metadata",
                logger,
                log_level=loglevel,
                process_step="process",
            )
            process_updates(dataset, updates_instruction)

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["output"].file = filepath

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
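Example 4
    # Handler that runs a climdex.pcic function (climdex.<func>) over each
    # climdexInput object via rpy2 and saves the resulting vectors to an R data file.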
    def _handler(self, request, response):
        climdex_input, freq, func, loglevel, output_file = process_inputs_alpha(
            request.inputs, temp_pctl_inputs, self.workdir)

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )
        robjects.r("library(climdex.pcic)")
        vectors = []

        counter = 1
        total = len(climdex_input)

        for climdex_file in climdex_input:
            log_handler(
                self,
                response,
                f"Loading climdexInput from R data file {counter}/{total}",
                logger,
                log_level=loglevel,
                process_step="load_rdata",
            )
            cis = load_cis(climdex_file)

            log_handler(
                self,
                response,
                f"Processing climdex.{func} for file {counter}/{total}",
                logger,
                log_level=loglevel,
                process_step="process",
            )

            for ci_name, ci in cis.items():
                try:
                    robjects.r.assign("ci", ci)
                    monthly_pct = robjects.r(f"climdex.{func}(ci, '{freq}')")
                except RRuntimeError as e:
                    raise ProcessError(msg=f"{type(e).__name__}: {str(e)}")

                vector_name = f"{func}{counter}_{ci_name}"
                robjects.r.assign(vector_name, monthly_pct)
                vectors.append(vector_name)
            counter += 1

        log_handler(
            self,
            response,
            f"Saving {func} as R data file",
            logger,
            log_level=loglevel,
            process_step="save_rdata",
        )
        output_path = os.path.join(self.workdir, output_file)
        robjects.r["save"](*vectors, file=output_path)

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["rda_output"].file = output_path

        # Clear R global env
        robjects.r("rm(list=ls())")

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
Example 5
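    # Handler that computes the maximum <num_days>-day precipitation (rx<n>day)
    # for each climdexInput object and saves the vectors to an R data file.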
    def _handler(self, request, response):
        (
            center_mean_on_last_day,
            climdex_input,
            freq,
            loglevel,
            num_days,
            output_file,
        ) = process_inputs_alpha(request.inputs, rxnday_inputs, self.workdir)

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )
        robjects.r("library(climdex.pcic)")
        vectors = []

        counter = 1
        total = len(climdex_input)

        for climdex_file in climdex_input:
            log_handler(
                self,
                response,
                f"Loading climdexInput from R data file {counter}/{total}",
                logger,
                log_level=loglevel,
                process_step="load_rdata",
            )
            cis = load_cis(climdex_file)

            log_handler(
                self,
                response,
                f"Processing Monthly Maximum {num_days}-day Precipitation for file {counter}/{total}",
                logger,
                log_level=loglevel,
                process_step="process",
            )

            for ci_name, ci in cis.items():
                try:
                    robjects.r.assign("ci", ci)
                    rxnday = self.rxnday_func(ci, num_days, freq,
                                              center_mean_on_last_day)
                except RRuntimeError as e:
                    raise ProcessError(msg=f"{type(e).__name__}: {str(e)}")

                vector_name = f"rx{num_days}day{counter}_{ci_name}"
                robjects.r.assign(vector_name, rxnday)
                vectors.append(vector_name)
            counter += 1

        log_handler(
            self,
            response,
            f"Saving rx{num_days}day vector to R data file",
            logger,
            log_level=loglevel,
            process_step="save_rdata",
        )
        output_path = os.path.join(self.workdir, output_file)
        robjects.r["save"](*vectors, file=output_path)

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["rda_output"].file = output_path

        # Clear R global env
        robjects.r("rm(list=ls())")

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
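Example 6
    # Handler that validates a NetCDF flow-routing file and its variable, then
    # decomposes the flow-direction vectors into grid components.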
    def _handler(self, request, response):
        loglevel = request.inputs["loglevel"][0].data
        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        source_file = self.get_filepath(request)
        variable = request.inputs["variable"][0].data
        dest_file = os.path.join(self.workdir,
                                 request.inputs["dest_file"][0].data)

        source = Dataset(source_file, "r", format="NETCDF4")

        log_handler(
            self,
            response,
            f"Checking {source_file} and {variable}",
            logger,
            log_level=loglevel,
            process_step="input_check",
        )
        try:
            source_check(source)
        except AttributeError:
            raise ProcessError(
                "netcdf file does not have latitude and longitude dimensions")
        except ValueError:
            raise ProcessError(
                "netcdf file does not have a valid flow variable")

        try:
            variable_check(source, variable)
        except AttributeError:
            raise ProcessError(
                "Variable is either not found in netcdf file or is not associated with a grid"
            )
        except ValueError:
            raise ProcessError("Variable is not a valid flow routing")

        log_handler(
            self,
            response,
            "Decomposing flow direction vectors into grids",
            logger,
            log_level=loglevel,
            process_step="process",
        )

        try:
            decompose_flow_vectors(source, dest_file, variable)
        except Exception as e:
            raise ProcessError(f"{type(e).__name__}: {e}")

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["output"].file = dest_file

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
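Example 7
    # Handler that resolves impacts rules from an uploaded CSV against a region
    # and ensemble, writing the resolved rules to a JSON output file.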
    def _handler(self, request, response):
        (
            csv,
            date_range,
            region,
            geoserver,
            connection_string,
            ensemble,
            thredds,
            loglevel,
        ) = collect_args(request.inputs, self.workdir).values()

        connection_string = update_connection(connection_string)

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        csv.seek(0)
        csv_content = csv.read()

        try:
            csv_content = csv_content.decode("utf-8")
        except (UnicodeDecodeError, AttributeError):
            pass

        with NamedTemporaryFile(mode="w+", suffix=".csv") as temp_csv:
            temp_csv.write(csv_content)
            temp_csv.seek(0)

            log_handler(
                self,
                response,
                "Resolving impacts rules",
                logger,
                log_level=loglevel,
                process_step="process",
            )
            try:
                resolved = resolve_rules(
                    temp_csv.name,
                    date_range,
                    get_region(region, geoserver),
                    ensemble,
                    connection_string,
                    thredds,
                    loglevel,
                )
            except Exception as e:
                custom_process_error(e)

        log_handler(
            self,
            response,
            "Cleaning and building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )

        filepath = os.path.join(self.workdir, "resolved.json")
        with open(filepath, "w") as f:
            json.dump(resolved, f)

        response.outputs["json"].file = filepath
        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
Example 8
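    # Handler that computes climdex.sdii (simple daily intensity index) for
    # each climdexInput object and saves the vectors to an R data file.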
    def _handler(self, request, response):
        climdex_input, loglevel, output_file = process_inputs_alpha(
            request.inputs, sdii_inputs, self.workdir)

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )
        climdex = get_package("climdex.pcic")
        vectors = []

        counter = 1
        total = len(climdex_input)

        for climdex_file in climdex_input:
            log_handler(
                self,
                response,
                f"Loading climdexInput from R data file {counter}/{total}",
                logger,
                log_level=loglevel,
                process_step="load_rdata",
            )
            cis = load_cis(climdex_file)

            log_handler(
                self,
                response,
                f"Processing the mean daily diurnal temperature range for file {counter}/{total}",
                logger,
                log_level=loglevel,
                process_step="process",
            )

            for ci_name, ci in cis.items():
                try:
                    robjects.r.assign("ci", ci)
                    sdii = climdex.climdex_sdii(ci)
                except RRuntimeError as e:
                    raise ProcessError(msg=f"{type(e).__name__}: {str(e)}")

                vector_name = f"sdii{counter}_{ci_name}"
                robjects.r.assign(vector_name, sdii)
                vectors.append(vector_name)
            counter += 1

        log_handler(
            self,
            response,
            "Saving dtr vector to R data file",
            logger,
            log_level=loglevel,
            process_step="save_rdata",
        )
        output_path = os.path.join(self.workdir, output_file)
        robjects.r["save"](*vectors, file=output_path)

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["rda_output"].file = output_path

        # Clear R global env
        robjects.r("rm(list=ls())")

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
Example 9
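    # Handler that chains the full workflow: builds the parameters
    # configuration, runs parameters(), then rebuilds the convolution
    # configuration and runs the flux convolution to produce flow data.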
    def _handler(self, request, response):
        (
            loglevel,
            version,
            np,
            case_id,
            grid_id,
            run_startdate,
            stop_date,
            pour_points,
            uh_box,
            routing,
            domain,
            input_forcings,
            params_config_file,
            params_config_dict,
            convolve_config_file,
            convolve_config_dict,
        ) = collect_args_wrapper(
            request,
            self.workdir,
            modules=[parameters.__name__, convolution.__name__])

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )
        if version:
            logger.info(version)

        log_handler(
            self,
            response,
            "Rebuilding Parameters configuration",
            logger,
            log_level=loglevel,
            process_step="params_config_rebuild",
        )

        uh_box_content = prep_csv(uh_box)
        pour_points_content = prep_csv(pour_points)

        with NamedTemporaryFile(
                mode="w+", suffix=".csv") as temp_uh_box, NamedTemporaryFile(
                    mode="w+", suffix=".csv") as temp_pour_points:
            temp_uh_box.write(uh_box_content)
            temp_uh_box.seek(0)
            temp_pour_points.write(pour_points_content)
            temp_pour_points.seek(0)

            params_config = params_config_handler(
                self.workdir,
                case_id,
                domain,
                grid_id,
                temp_pour_points.name,
                routing,
                temp_uh_box.name,
                params_config_file,
                params_config_dict,
            )

            log_handler(
                self,
                response,
                "Processing parameters",
                logger,
                log_level=loglevel,
                process_step="params_process",
            )
            try:
                parameters(params_config, np)
            except Exception as e:
                raise ProcessError(f"{type(e).__name__}: {e}")

        log_handler(
            self,
            response,
            "Building parameters file",
            logger,
            log_level=loglevel,
            process_step="params_build",
        )
        params_file = get_outfile(params_config, "params")

        log_handler(
            self,
            response,
            "Rebuilding Convolution configuration",
            logger,
            log_level=loglevel,
            process_step="convolve_config_rebuild",
        )
        convolve_config = convolve_config_handler(
            self.workdir,
            case_id,
            run_startdate,
            stop_date,
            domain,
            params_file,
            input_forcings,
            convolve_config_file,
            convolve_config_dict,
        )

        log_handler(
            self,
            response,
            "Run Flux Convolution",
            logger,
            log_level=loglevel,
            process_step="convolution_process",
        )
        try:
            convolution(convolve_config)
        except Exception as e:
            raise ProcessError(f"{type(e).__name__}: {e}")

        log_handler(
            self,
            response,
            "Building final flow data output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["output"].file = get_outfile(convolve_config, "hist")

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
Example 10
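    # Handler that parses each condition expression into a parse tree with its
    # variables and writes the collected results to a JSON file.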
    def _handler(self, request, response):
        loglevel = request.inputs["loglevel"][0].data
        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        conditions = request.inputs["conditions"]

        log_handler(
            self,
            response,
            "Building parse tree",
            logger,
            log_level=loglevel,
            process_step="process",
        )

        try:
            parsed_vars = {}
            for condition in conditions:
                parse_tree, variables, region_var = build_parse_tree(condition.data)
                parsed_vars[condition.data] = {
                    "parse_tree": parse_tree,
                    "variables": variables,
                    "region_variable": region_var,
                }
        except SyntaxError as e:
            raise ProcessError(
                f"{type(e).__name__}: Invalid syntax in condition {conditions.index(condition)}"
            )
        except ValueError as e:
            raise ProcessError(
                f"{type(e).__name__}: variable name should have 5 values, variable, "
                "time_of_year, temporal, spatial, and percentile"
            )
        except Exception as e:
            raise ProcessError(f"{type(e).__name__}: {e}")

        log_handler(
            self,
            response,
            "Cleaning and building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )

        filepath = os.path.join(self.workdir, "parsed_vars.json")
        with open(filepath, "w") as f:
            json.dump(parsed_vars, f)

        response.outputs["json"].file = filepath

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
Example 11
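    # Handler that evaluates rules against a previously built parse tree and
    # collected variables, writing the resulting truth values to a JSON file.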
    def _handler(self, request, response):
        rules, parse_tree_path, variables_path, loglevel = collect_args(
            request.inputs, self.workdir).values()

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        try:
            with open(parse_tree_path) as json_file:
                parse_tree = json.load(json_file)
        except (TypeError, json.JSONDecodeError) as e:
            raise ProcessError(
                f"{type(e).__name__}: Invalid parse tree file. {e}")

        try:
            with open(variables_path) as json_file:
                collected_variables = json.load(json_file)
        except (TypeError, json.JSONDecodeError) as e:
            raise ProcessError(
                f"{type(e).__name__}: Invalid variables file. {e}")

        variable_getter = partial(get_dict_val, collected_variables)
        rule_getter = partial(get_dict_val, parse_tree)

        log_handler(
            self,
            response,
            "Evaluating expression",
            logger,
            log_level=loglevel,
            process_step="process",
        )

        try:
            truth_values = {
                rule: evaluate_rule(rule, rule_getter, variable_getter)
                for rule in rules
            }
        except NotImplementedError as e:
            raise ProcessError(
                f"{type(e).__name__}: Unable to process expression "
                "because it contains invalid characters")
        except Exception as e:
            raise ProcessError(f"{type(e).__name__}: {e}")

        log_handler(
            self,
            response,
            "Cleaning and building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )

        filepath = os.path.join(self.workdir, "truth_values.json")
        with open(filepath, "w") as f:
            json.dump(truth_values, f)

        response.outputs["json"].file = filepath

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
Example 12
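    # Handler that computes climdex.quantile on a data vector (taken from an R
    # data file or an R expression) and saves the result to an R data file.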
    def _handler(self, request, response):
        (
            data_file,
            data_vector,
            loglevel,
            output_file,
            quantiles_vector,
            vector_name,
        ) = process_inputs_alpha(request.inputs, quantile_inputs, self.workdir)
        validate_vectors([quantiles_vector])
        r_valid_name(vector_name)

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )
        climdex = get_package("climdex.pcic")

        log_handler(
            self,
            response,
            "Loading R data file",
            logger,
            log_level=loglevel,
            process_step="load_rdata",
        )

        if data_file:
            data = self.unpack_data_file(data_file, data_vector)
        else:
            data = robjects.r(data_vector)

        log_handler(
            self,
            response,
            "Processing climdex.quantile",
            logger,
            log_level=loglevel,
            process_step="process",
        )

        try:
            quantiles = robjects.r(quantiles_vector)
            quantile_vector = climdex.climdex_quantile(data, quantiles)
        except RRuntimeError as e:
            raise ProcessError(msg=f"{type(e).__name__}: {str(e)}")

        log_handler(
            self,
            response,
            "Saving quantile as R data file",
            logger,
            log_level=loglevel,
            process_step="save_rdata",
        )
        output_path = os.path.join(self.workdir, output_file)
        save_python_to_rdata(vector_name, quantile_vector, output_path)

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["rda_output"].file = output_path
        response.outputs["output_vector"].data = str(quantile_vector)

        # Clear R global env
        robjects.r("rm(list=ls())")

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
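Example 13
    # Handler that generates climatologies from the input files, or, when
    # dry_run is set, reports what would be produced without processing.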
    def _handler(self, request, response):
        (
            climo,
            resolutions,
            convert_longitudes,
            split_vars,
            split_intervals,
            dry_run,
            operation,
            loglevel,
        ) = self.collect_args(request)

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        filepaths = get_filepaths(request.inputs["netcdf"])
        if dry_run:
            log_handler(
                self,
                response,
                "Dry Run",
                logger,
                log_level=loglevel,
                process_step="dry_run",
            )
            del response.outputs["output"]  # remove unnecessary output
            dry_files = [
                dry_run_info(
                    dry_output_filename(self.workdir, filepath),
                    dry_run_handler,
                    filepath=filepath,
                    climo=climo,
                ) for filepath in filepaths
            ]

            response.outputs["dry_output"].data = build_meta_link(
                varname="dry_run",
                desc="Dry Run",
                outfiles=dry_files,
                format_name="text",
                fmt=FORMATS.TEXT,
            )

        else:
            del response.outputs["dry_output"]  # remove unnecessary output
            response.update_status("Processing filepaths", 10)
            for filepath in filepaths:
                log_handler(
                    self,
                    response,
                    f"Processing {filepath} into climatologies",
                    logger,
                    log_level=loglevel,
                    process_step="process",
                )

                if input_check(filepath, climo) == (None, []):
                    raise ProcessError(
                        "Invlaid input file. Run generate climos with dry run as True for more information."
                    )

                try:
                    generate_climos(
                        filepath,
                        self.workdir,
                        operation,
                        climo,
                        convert_longitudes=convert_longitudes,
                        split_vars=split_vars,
                        split_intervals=split_intervals,
                        resolutions=resolutions,
                    )
                except Exception as e:
                    raise ProcessError(f"{type(e).__name__}: {e}")

            log_handler(
                self,
                response,
                "Collecting climo files",
                logger,
                log_level=loglevel,
                process_step="collect_files",
            )

            climo_files = collect_output_files(self.get_identifier(operation),
                                               self.workdir)

            log_handler(
                self,
                response,
                "Building final output",
                logger,
                log_level=loglevel,
                process_step="build_output",
            )
            response.outputs["output"].data = build_meta_link(
                varname="climo",
                desc="Climatology",
                outfiles=climo_files,
                outdir=self.workdir,
            )

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
Example 14
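    # Handler that counts days with precipitation above <threshold> mm
    # (climdex.r<threshold>mm) for each climdexInput object and saves the vectors.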
    def _handler(self, request, response):
        climdex_input, loglevel, output_file, threshold = process_inputs_alpha(
            request.inputs, rmm_inputs, self.workdir)

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )
        climdex = get_package("climdex.pcic")
        vectors = []

        counter = 1
        total = len(climdex_input)

        for climdex_file in climdex_input:
            log_handler(
                self,
                response,
                f"Loading climdexInput from R data file {counter}/{total}",
                logger,
                log_level=loglevel,
                process_step="load_rdata",
            )
            cis = load_cis(climdex_file)

            log_handler(
                self,
                response,
                f"Processing the annual count of days where daily precipitation is more than {threshold}mm per day for file {counter}/{total}",
                logger,
                log_level=loglevel,
                process_step="process",
            )

            for ci_name, ci in cis.items():
                try:
                    robjects.r.assign("ci", ci)
                    count_days = self.threshold_func(threshold, ci)
                except RRuntimeError as e:
                    raise ProcessError(msg=f"{type(e).__name__}: {str(e)}")

                vector_name = f"r{threshold}mm{counter}_{ci_name}"
                robjects.r.assign(vector_name, count_days)
                vectors.append(vector_name)
            counter += 1

        log_handler(
            self,
            response,
            f"Saving climdex.r{threshold}mm outputs to R data file",
            logger,
            log_level=loglevel,
            process_step="save_rdata",
        )
        output_path = os.path.join(self.workdir, output_file)
        robjects.r["save"](*vectors, file=output_path)

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["rda_output"].file = output_path

        # Clear R global env
        robjects.r("rm(list=ls())")

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
Example 15
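    # Handler that rebuilds the convolution configuration and runs the flux
    # convolution, returning the resulting flow data file.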
    def _handler(self, request, response):
        (
            loglevel,
            case_id,
            run_startdate,
            stop_date,
            domain,
            param_file,
            input_forcings,
            convolve_config_file,
            convolve_config_dict,
        ) = collect_args_wrapper(request,
                                 self.workdir,
                                 modules=[convolution.__name__])

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        log_handler(
            self,
            response,
            "Rebuilding configuration",
            logger,
            log_level=loglevel,
            process_step="config_rebuild",
        )
        config = convolve_config_handler(
            self.workdir,
            case_id,
            run_startdate,
            stop_date,
            domain,
            param_file,
            input_forcings,
            convolve_config_file,
            convolve_config_dict,
        )

        log_handler(
            self,
            response,
            "Run Flux Convolution",
            logger,
            log_level=loglevel,
            process_step="process",
        )
        try:
            convolution(config)
        except Exception as e:
            raise ProcessError(f"{type(e).__name__}: {e}")

        log_handler(
            self,
            response,
            "Building final flow data output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["output"].file = get_outfile(config, "hist")

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
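Example 16
    # Handler that derives snowfall flux (prsn) files from the input files, or,
    # when dry_run is set, reports a dry run instead.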
    def _handler(self, request, response):
        (chunk_size, loglevel, dry_run, output_file) = self.collect_args(request)
        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )

        filepaths = self.get_filepaths(request)

        if dry_run:
            log_handler(
                self,
                response,
                "Dry Run",
                logger,
                log_level=loglevel,
                process_step="dry_run",
            )
            del response.outputs["output"]  # remove unnecessary output
            dry_file = dry_run_info(
                dry_output_filename(self.workdir, "prsn_dry.txt"),
                dry_run_handler,
                filepaths=filepaths,
            )
            response.outputs["dry_output"].file = dry_file

        else:
            del response.outputs["dry_output"]  # remove unnecessary output

            log_handler(
                self,
                response,
                f"Processing {filepaths} into snowfall fluxes",
                logger,
                log_level=loglevel,
                process_step="process",
            )

            try:
                generate_prsn_file(filepaths, chunk_size, self.workdir, output_file)
            except Exception as e:
                raise ProcessError(f"{type(e).__name__}: {e}")

            log_handler(
                self,
                response,
                "Collecting snowfall files",
                logger,
                log_level=loglevel,
                process_step="collect_files",
            )
            (prsn_file,) = collect_output_files("prsn", self.workdir)

            log_handler(
                self,
                response,
                "Building final output",
                logger,
                log_level=loglevel,
                process_step="build_output",
            )
            response.outputs["output"].file = os.path.join(self.workdir, prsn_file)

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response
Example 17
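    # Handler that builds a climdexInput object from raw temperature and
    # precipitation inputs (climdexInput.raw) and saves it as an R data file.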
    def _handler(self, request, response):
        (
            base_range,
            cal,
            date_fields,
            date_format,
            loglevel,
            max_missing_days,
            min_base_data_fraction_present,
            n,
            northern_hemisphere,
            output_file,
            prec_column,
            prec_file,
            prec_name,
            prec_qtiles,
            quantiles,
            tavg_column,
            tavg_file,
            tavg_name,
            temp_qtiles,
            tmax_column,
            tmax_file,
            tmax_name,
            tmin_column,
            tmin_file,
            tmin_name,
            vector_name,
        ) = process_inputs_alpha(request.inputs, raw_inputs, self.workdir)

        validate_vectors([
            base_range,
            date_fields,
            temp_qtiles,
            prec_qtiles,
            max_missing_days,
        ])

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )
        climdex = get_package("climdex.pcic")
        robjects.r("library(PCICt)")

        log_handler(
            self,
            response,
            "Prepare parameters for climdexInput.raw",
            logger,
            log_level=loglevel,
            process_step="prepare_params",
        )
        params = self.prepare_parameters(
            request,
            tmax_name,
            tmin_name,
            prec_name,
            tavg_name,
            tmax_column,
            tmin_column,
            prec_column,
            tavg_column,
            date_fields,
            date_format,
            cal,
            prec_file,
            tavg_file,
            tmax_file,
            tmin_file,
        )

        log_handler(
            self,
            response,
            "Processing climdexInput.raw",
            logger,
            log_level=loglevel,
            process_step="process",
        )

        try:
            ci = climdex.climdexInput_raw(
                **params,
                base_range=robjects.r(base_range),
                n=n,
                northern_hemisphere=northern_hemisphere,
                quantiles=robjects.r(quantiles),
                temp_qtiles=robjects.r(temp_qtiles),
                prec_qtiles=robjects.r(prec_qtiles),
                max_missing_days=robjects.r(max_missing_days),
                min_base_data_fraction_present=min_base_data_fraction_present,
            )
        except RRuntimeError as e:
            raise ProcessError(msg=f"{type(e).__name__}: {str(e)}")

        log_handler(
            self,
            response,
            "Saving climdexInput as R data file",
            logger,
            log_level=loglevel,
            process_step="save_rdata",
        )
        output_path = os.path.join(self.workdir, output_file)
        r_valid_name(vector_name)
        save_python_to_rdata(vector_name, ci, output_path)

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["climdexInput"].file = output_path

        # Clear R global env
        robjects.r("rm(list=ls())")

        return response
Example 18
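    # Handler that loads a climdexInput object and reports which climdex
    # indices can be computed from it as available processes.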
    def _handler(self, request, response):
        ci_name, climdex_single_input, loglevel, output_file = process_inputs_alpha(
            request.inputs, avail_indices_inputs, self.workdir)

        log_handler(
            self,
            response,
            "Starting Process",
            logger,
            log_level=loglevel,
            process_step="start",
        )
        climdex = get_package("climdex.pcic")

        log_handler(
            self,
            response,
            "Loading climdexInput from R data file",
            logger,
            log_level=loglevel,
            process_step="load_rdata",
        )
        ci = get_robj(climdex_single_input, ci_name)

        log_handler(
            self,
            response,
            "Processing climdex_get_available_indices",
            logger,
            log_level=loglevel,
            process_step="process",
        )

        try:
            robjects.r.assign("ci", ci)
            avail_indices = climdex.climdex_get_available_indices(ci, False)
        except RRuntimeError as e:
            raise ProcessError(msg=f"{type(e).__name__}: {str(e)}")

        avail_processes = self.available_processes(avail_indices)

        log_handler(
            self,
            response,
            "Building final output",
            logger,
            log_level=loglevel,
            process_step="build_output",
        )
        response.outputs["avail_processes"].data = avail_processes

        # Clear R global env
        robjects.r("rm(list=ls())")

        log_handler(
            self,
            response,
            "Process Complete",
            logger,
            log_level=loglevel,
            process_step="complete",
        )
        return response