def train_for_region(self, data_source, region_type, region_name,
                     train_start_date, train_end_date, search_space,
                     search_parameters, train_loss_function, input_filepath):
    """Fetch region data from the given source and run training on it.

    Downloads observations (from *input_filepath* via *data_source*) and
    regional metadata, then delegates to ``self.train``.

    @param data_source: data source to fetch observations/metadata from
    @param region_type: region_type supported by the data fetcher module
    @param region_name: region_name supported by the data fetcher module
    @param train_start_date: start date of the training window
    @param train_end_date: end date of the training window
    @param search_space: hyper-parameter search space
    @param search_parameters: parameters controlling the search
    @param train_loss_function: loss function used for training
    @param input_filepath: path to the input data file
    @return: result of ``self.train``
    """
    obs = DataFetcherModule.get_observations_for_region(
        region_type, region_name,
        data_source=data_source, filepath=input_filepath)
    metadata = DataFetcherModule.get_regional_metadata(
        region_type, region_name, data_source=data_source)
    return self.train(metadata, obs, train_start_date, train_end_date,
                      search_space, search_parameters, train_loss_function)
def predict_for_region(self, region_type, region_name, run_day,
                       forecast_start_date, forecast_end_date):
    """Fetch region data using default sources and produce a forecast.

    Downloads observations and regional metadata for the region, then
    delegates to ``self.predict``.

    @param region_type: region_type supported by the data fetcher module
    @param region_name: region_name supported by the data fetcher module
    @param run_day: date of initialization
    @param forecast_start_date: first date of the forecast window
    @param forecast_end_date: last date of the forecast window
    @return: result of ``self.predict``
    """
    obs = DataFetcherModule.get_observations_for_region(region_type,
                                                        region_name)
    metadata = DataFetcherModule.get_regional_metadata(region_type,
                                                       region_name)
    return self.predict(region_type, region_name, metadata, obs, run_day,
                        forecast_start_date, forecast_end_date)
def evaluate_for_region(self, region_type, region_name, run_day,
                        test_start_date, test_end_date, loss_functions):
    """Fetch region data using default sources and evaluate the model.

    Downloads observations and regional metadata for the region, then
    delegates to ``self.evaluate``.

    @param region_type: region_type supported by the data fetcher module
    @param region_name: region_name supported by the data fetcher module
    @param run_day: date of initialization
    @param test_start_date: first date of the evaluation window
    @param test_end_date: last date of the evaluation window
    @param loss_functions: loss functions to compute during evaluation
    @return: result of ``self.evaluate``
    """
    obs = DataFetcherModule.get_observations_for_region(region_type,
                                                        region_name)
    metadata = DataFetcherModule.get_regional_metadata(region_type,
                                                       region_name)
    return self.evaluate(metadata, obs, run_day, test_start_date,
                         test_end_date, loss_functions)
def train_for_region(self, region_type, region_name, train_start_date,
                     train_end_date, search_space, search_parameters,
                     train_loss_function):
    """Fetch region data using default sources and run training on it.

    Downloads observations and regional metadata for the region, then
    delegates to ``self.train``.

    @param region_type: region_type supported by the data fetcher module
    @param region_name: region_name supported by the data fetcher module
    @param train_start_date: start date of the training window
    @param train_end_date: end date of the training window
    @param search_space: hyper-parameter search space
    @param search_parameters: parameters controlling the search
    @param train_loss_function: loss function used for training
    @return: result of ``self.train``
    """
    obs = DataFetcherModule.get_observations_for_region(region_type,
                                                        region_name)
    metadata = DataFetcherModule.get_regional_metadata(region_type,
                                                       region_name)
    return self.train(metadata, obs, train_start_date, train_end_date,
                      search_space, search_parameters, train_loss_function)
def evaluate_for_region(self, data_source, region_type, region_name,
                        run_day, test_start_date, test_end_date,
                        loss_functions, input_filepath):
    """Fetch region data from the given source and evaluate the model.

    Downloads observations (from *input_filepath* via *data_source*) and
    regional metadata, then delegates to ``self.evaluate``.

    @param data_source: data source to fetch observations/metadata from
    @param region_type: region_type supported by the data fetcher module
    @param region_name: region_name supported by the data fetcher module
    @param run_day: date of initialization
    @param test_start_date: first date of the evaluation window
    @param test_end_date: last date of the evaluation window
    @param loss_functions: loss functions to compute during evaluation
    @param input_filepath: path to the input data file
    @return: result of ``self.evaluate``
    """
    obs = DataFetcherModule.get_observations_for_region(
        region_type, region_name,
        data_source=data_source, filepath=input_filepath)
    metadata = DataFetcherModule.get_regional_metadata(
        region_type, region_name, data_source=data_source)
    return self.evaluate(metadata, obs, run_day, test_start_date,
                         test_end_date, loss_functions)
def predict_for_region(self, data_source, region_type, region_name,
                       run_day, forecast_start_date, forecast_end_date,
                       input_filepath):
    """Fetch region data from the given source and produce a forecast.

    Downloads observations (from *input_filepath* via *data_source*) and
    regional metadata, then delegates to ``self.predict``.

    @param data_source: data source to fetch observations/metadata from
    @param region_type: region_type supported by the data fetcher module
    @param region_name: region_name supported by the data fetcher module
    @param run_day: date of initialization
    @param forecast_start_date: first date of the forecast window
    @param forecast_end_date: last date of the forecast window
    @param input_filepath: path to the input data file
    @return: result of ``self.predict``
    """
    obs = DataFetcherModule.get_observations_for_region(
        region_type, region_name,
        data_source=data_source, filepath=input_filepath)
    metadata = DataFetcherModule.get_regional_metadata(
        region_type, region_name, data_source=data_source)
    return self.predict(region_type, region_name, metadata, obs, run_day,
                        forecast_start_date, forecast_end_date)
def predict_for_region(self, data_source: DataSource, region_type: str,
                       region_name: List[str], run_day: str, start_date: str,
                       input_type: InputType,
                       time_intervals: List[ForecastTimeInterval],
                       input_filepath: str):
    """Download data using the data fetcher module, then run predict on it.

    @param data_source: data source to fetch observations/metadata from
    @param region_type: region_type supported by data_fetcher module
    @param region_name: region_name supported by data_fetcher module
    @param run_day: date of initialization
    @param start_date: start_date
    @param input_type: input_type can be npi_list/param_override
    @param time_intervals: list of time_intervals with parameters
    @param input_filepath: input data file path
    @return: pd.DataFrame: predictions
    """
    observations = DataFetcherModule.get_observations_for_region(
        region_type, region_name,
        data_source=data_source, filepath=input_filepath)
    # Fix: pass data_source here too, matching the sibling *_for_region
    # methods — previously metadata was fetched from the default source
    # while observations came from `data_source`, so they could disagree.
    region_metadata = DataFetcherModule.get_regional_metadata(
        region_type, region_name, data_source=data_source)
    return self.predict(region_type, region_name, region_metadata,
                        observations, run_day, start_date, input_type,
                        time_intervals)