def predict_main(aoi_path):
    """Run the end-to-end prediction pipeline for an area of interest.

    Takes the JSON file from the frontend describing the area of
    interest, downloads the required imagery, predicts land cover, and
    attaches CO2 metrics to each sub-polygon.

    Args:
        aoi_path: path to the JSON file with the area of interest
            supplied by the frontend.

    Returns:
        The prediction JSON with sub-polygons, each annotated with the
        "veg_co2_metric" and "soil_co2_metric" attributes.
    """
    # Download Sentinel-2 images from GEE into the raw-data directory.
    data_parent_path = os.path.join("..", "data", "raw")
    dataset_name = data.download_dataset(
        aoi_path,
        data_parent_path=data_parent_path,
        get_sent2=True,
        get_glc=False,
        get_ndvi=False,
    )
    # Preprocess data (creates dataset folder structure in data/preprocessed).
    data.make_dataset(dataset_name)
    # Predict land cover; prediction is a JSON with sub-polygons.
    prediction = predict_model(dataset_name)
    # Get CO2 estimations for each sub-polygon.
    prediction = calc_vegetation_co_metric(prediction)  # adds "veg_co2_metric"
    prediction = calc_soil_co_metric(prediction)  # adds "soil_co2_metric"
    # Bug fix: the return was commented out and the function ended with
    # `pass`, so callers always received None.
    return prediction
def build_validation_data_loader(self) -> DataLoader:
    """Return a DataLoader over the validation split.

    Downloads the dataset on first use; later calls reuse the
    already-downloaded copy.
    """
    if not self.data_downloaded:
        # One-time fetch, guarded by the instance flag.
        self.download_directory = data.download_dataset(
            download_directory=self.download_directory,
            data_config=self.context.get_data_config(),
        )
        self.data_downloaded = True
    val_set = data.get_dataset(self.download_directory, train=False)
    per_slot_batch = self.context.get_per_slot_batch_size()
    return DataLoader(val_set, batch_size=per_slot_batch)
def build_training_data_loader(self) -> DataLoader:
    """Return a DataLoader over the training split.

    Downloads the dataset on first use; later calls reuse the
    already-downloaded copy.
    """
    if not self.data_downloaded:
        # One-time fetch, guarded by the instance flag.
        self.download_directory = data.download_dataset(
            download_directory=self.download_directory,
            data_config=self.context.get_data_config(),
        )
        self.data_downloaded = True
    train_set = data.get_dataset(self.download_directory, train=True)
    per_slot_batch = self.context.get_per_slot_batch_size()
    return DataLoader(train_set, batch_size=per_slot_batch)
def build_training_data_loader(self) -> DataLoader:
    """Return a DataLoader over the multi-dataset training split.

    Downloads the dataset on first use; batches are assembled with the
    project's custom collate function.
    """
    if not self.data_downloaded:
        # One-time fetch, guarded by the instance flag.
        self.download_directory = data.download_dataset(
            download_directory=self.download_directory,
            data_config=self.context.get_data_config(),
        )
        self.data_downloaded = True
    train_set = data.get_multi_dataset(self.download_directory, train=True)
    per_slot_batch = self.context.get_per_slot_batch_size()
    return DataLoader(
        train_set,
        batch_size=per_slot_batch,
        collate_fn=data.collate_fn,
    )