def load(cls, path: str, reset_paths: bool = False) -> "SimpleAbstractTrainer":
    """Deserialize a trainer from ``path + cls.trainer_file_name``.

    When ``reset_paths`` is True, the loaded object's internal paths are
    rebased onto ``path`` and its ``reset_paths`` flag is updated;
    otherwise the pickled object is returned untouched.
    """
    obj = load_pkl.load(path=path + cls.trainer_file_name)
    if reset_paths:
        obj.set_contexts(path)
        obj.reset_paths = reset_paths
    return obj
def load(cls, path: str, reset_paths=True, verbose=True):
    """Load a saved model from disk into memory.

    Parameters
    ----------
    path : str
        Directory the model was saved to (should end with the path
        separator). The pickle is read from ``path + cls.model_file_name``.
    reset_paths : bool, default True
        If True, rebase the loaded model's ``self.path`` onto ``path``.
        Keeping this True is strongly recommended; with False, ``self.path``
        may point at a stale location and later file loads can fail.
    verbose : bool, default True
        Whether to log the location of the loaded file.

    Returns
    -------
    model : cls
        The loaded model object.
    """
    model = load_pkl.load(path=path + cls.model_file_name, verbose=verbose)
    if reset_paths:
        model.set_contexts(path)
    return model
def lgb_trial(args, reporter):
    """ Training script for hyperparameter evaluation of Gradient Boosting model """
    try:
        # Split the scheduler args into the model, its hyperparameters, and utility args.
        model, args, util_args = model_trial.prepare_inputs(args=args)

        try_import_lightgbm()
        import lightgbm as lgb
        # Train/val data were pre-dumped as LightGBM binary datasets; the raw
        # validation frame is additionally pickled for predict_proba scoring.
        dataset_train = lgb.Dataset(util_args.directory + util_args.dataset_train_filename)
        dataset_val = lgb.Dataset(util_args.directory + util_args.dataset_val_filename)
        X_val, y_val = load_pkl.load(util_args.directory + util_args.dataset_val_pkl_filename)

        fit_model_args = dict(dataset_train=dataset_train, dataset_val=dataset_val)
        predict_proba_args = dict(X=X_val)
        model_trial.fit_and_save_model(model=model, params=args, fit_args=fit_model_args, predict_proba_args=predict_proba_args, y_val=y_val,
                                       time_start=util_args.time_start, time_limit=util_args.get('time_limit', None), reporter=reporter)
    except Exception as e:
        # NOTE(review): nesting reconstructed from flattened source — assumed that
        # terminate() runs for ANY failure while logging is skipped only for
        # TimeLimitExceeded (matches sibling model_trial); confirm against original.
        if not isinstance(e, TimeLimitExceeded):
            logger.exception(e, exc_info=True)
        reporter.terminate()
def _load_oof(self):
    """Lazily populate the cached out-of-fold (OOF) prediction attributes.

    No-op if ``self._oof_pred_proba`` is already loaded; otherwise reads the
    OOF pickle from the model's ``utils`` directory and fills both
    ``_oof_pred_proba`` and ``_oof_pred_model_repeats``.
    """
    # Original used `if X is not None: pass / else:` — inverted to the direct form.
    if self._oof_pred_proba is None:
        oof = load_pkl.load(path=self.path + 'utils' + os.path.sep + self._oof_filename)
        self._oof_pred_proba = oof['_oof_pred_proba']
        self._oof_pred_model_repeats = oof['_oof_pred_model_repeats']
def load_info(cls, path, reset_paths=True, load_model_if_required=True):
    """Return the learner's info dict, preferring the lightweight info pickle.

    Falls back to loading the full learner and calling ``get_info()`` when the
    info file cannot be read and ``load_model_if_required`` is True; otherwise
    the original exception propagates.
    """
    load_path = path + cls.learner_info_name
    try:
        return load_pkl.load(path=load_path)
    except Exception:
        if load_model_if_required:
            learner = cls.load(path_context=path, reset_paths=reset_paths)
            return learner.get_info()
        # Bare `raise` re-raises with the original traceback intact
        # (the original `raise e` added a redundant re-raise frame).
        raise
def load(cls, file_prefix=""): """ Additional naming changes will be appended to end of file_prefix (must contain full absolute path) """ dataobj_file = file_prefix + cls.DATAOBJ_SUFFIX datalist_file = file_prefix + cls.DATAVALUES_SUFFIX dataset: TabularNNDataset = load_pkl.load(path=dataobj_file) data_list = mx.nd.load(datalist_file) dataset.generate_dataset_and_dataloader(data_list=data_list) logger.debug("TabularNN Dataset loaded from files: \n %s \n %s" % (dataobj_file, datalist_file)) return dataset
def load_info(cls, path, load_model_if_required=True) -> dict:
    """Return the model's info dict, preferring the lightweight info pickle.

    Falls back to loading the full model and calling ``get_info()`` when the
    info file cannot be read and ``load_model_if_required`` is True; otherwise
    the original exception propagates.
    """
    load_path = path + cls.model_info_name
    try:
        return load_pkl.load(path=load_path)
    # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt still propagate.
    except Exception:
        if load_model_if_required:
            model = cls.load(path=path, reset_paths=True)
            return model.get_info()
        else:
            raise
def load(cls, path: str, reset_paths=False, verbose=True):
    """Load a TabTransformerModel: the pickled wrapper plus its torch weights.

    The wrapper object is unpickled from ``path + cls.model_file_name``; the
    torch module is stored separately and re-attached from
    ``path + cls.params_file_name``.
    """
    import torch
    obj: TabTransformerModel = load_pkl.load(path=path + cls.model_file_name, verbose=verbose)
    if reset_paths:
        obj.set_contexts(path)
    obj.model = torch.load(path + cls.params_file_name)
    return obj
    # Removed stray trailing `"""` from the original — an unterminated string
    # delimiter after the return (dead/invalid text, likely extraction residue).
def load(cls, path_context, reset_paths=True):
    """Deserialize a learner from ``path_context + cls.learner_file_name``.

    With ``reset_paths`` True the learner's contexts are rebased onto
    ``path_context``; otherwise they are restored from the learner's own
    saved ``path_context``.
    """
    obj = load_pkl.load(path=path_context + cls.learner_file_name)
    if not reset_paths:
        obj.set_contexts(obj.path_context)
        return obj
    obj.set_contexts(path_context)
    obj.trainer_path = obj.model_context
    obj.reset_paths = reset_paths
    # TODO: Still have to change paths of models in trainer + trainer object path variables
    return obj
def load_info(cls, path, reset_paths=False, load_model_if_required=True) -> Dict[str, Any]:
    """Return the trainer's info dict, preferring the lightweight info pickle.

    Falls back to loading the full trainer and calling ``get_info()`` when the
    info file cannot be read and ``load_model_if_required`` is True; otherwise
    the original exception propagates.
    """
    load_path = path + cls.trainer_info_name
    try:
        return load_pkl.load(path=load_path)
    # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt still propagate.
    except Exception:
        if load_model_if_required:
            trainer = cls.load(path=path, reset_paths=reset_paths)
            return trainer.get_info()
        else:
            raise
def model_trial(args, reporter: LocalStatusReporter):
    """ Training script for hyperparameter evaluation of an arbitrary model that subclasses AbstractModel.

        Notes:
            - Model object itself must be passed as kwarg: model
            - All model hyperparameters must be stored in model.params dict that may contain special keys such as:
                'seed_value' to ensure reproducibility
                'num_threads', 'num_gpus' to set specific resources in model.fit()
            - model.save() must have return_filename, file_prefix, directory options
    """
    try:
        # Split the scheduler args into the model, its hyperparameters, and utility args.
        model, args, util_args = prepare_inputs(args=args)

        # Train/val splits were pre-pickled under util_args.directory by the caller.
        X_train, y_train = load_pkl.load(util_args.directory + util_args.dataset_train_filename)
        X_val, y_val = load_pkl.load(util_args.directory + util_args.dataset_val_filename)

        fit_model_args = dict(X_train=X_train, y_train=y_train, X_val=X_val, y_val=y_val)
        predict_proba_args = dict(X=X_val)
        # reporter=None here: the single report is emitted in the `else` clause below.
        model = fit_and_save_model(model=model, params=args, fit_args=fit_model_args, predict_proba_args=predict_proba_args, y_val=y_val,
                                   time_start=util_args.time_start, time_limit=util_args.get('time_limit', None), reporter=None)
    except Exception as e:
        # NOTE(review): nesting reconstructed from flattened source — assumed that
        # terminate() runs for ANY failure while logging is skipped only for
        # TimeLimitExceeded; confirm against original.
        if not isinstance(e, TimeLimitExceeded):
            logger.exception(e, exc_info=True)
        reporter.terminate()
    else:
        # Success path: report the validation score once.
        reporter(epoch=1, validation_performance=model.val_score)
def load_oof(cls, path, verbose=True):
    """Return out-of-fold prediction probabilities for the model at ``path``.

    Reads the standalone OOF pickle from the ``utils`` directory when present;
    if that file is missing, loads the full model and pulls the OOF caches
    from it instead. The result is post-processed via ``_oof_pred_proba_func``.
    """
    oof_path = path + 'utils' + os.path.sep + cls._oof_filename
    try:
        oof = load_pkl.load(path=oof_path, verbose=verbose)
        pred_proba = oof['_oof_pred_proba']
        pred_model_repeats = oof['_oof_pred_model_repeats']
    except FileNotFoundError:
        model = cls.load(path=path, reset_paths=True, verbose=verbose)
        model._load_oof()
        pred_proba = model._oof_pred_proba
        pred_model_repeats = model._oof_pred_model_repeats
    return cls._oof_pred_proba_func(oof_pred_proba=pred_proba, oof_pred_model_repeats=pred_model_repeats)
def load(cls, path, verbosity=2):
    """Load a saved TabularPredictor from ``path``.

    Restores logging verbosity (the load may happen in a fresh Python
    session), unpickles the predictor, re-attaches its learner, and warns
    loudly if the AutoGluon version at inference time differs from the
    version used at fit time.

    Raises
    ------
    ValueError
        If ``path`` is None.
    """
    set_logger_verbosity(verbosity, logger=logger)  # Reset logging after load (may be in new Python session)
    if path is None:
        raise ValueError("output_directory cannot be None in load()")

    path = setup_outputdir(path, warn_if_exist=False)  # replace ~ with absolute path if it exists
    predictor: TabularPredictor = load_pkl.load(path=path + cls.predictor_file_name)
    learner = predictor._learner_type.load(path)
    predictor._set_post_fit_vars(learner=learner)
    try:
        from ...version import __version__
        version_inference = __version__
    # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt still propagate.
    except Exception:
        version_inference = None
    # TODO: v0.1 Move version var to predictor object in the case where learner does not exist
    try:
        version_fit = predictor._learner.version
    except Exception:
        version_fit = None
    if version_fit is None:
        version_fit = 'Unknown (Likely <=0.0.11)'
    if version_inference != version_fit:
        logger.warning('')
        logger.warning('############################## WARNING ##############################')
        logger.warning('WARNING: AutoGluon version differs from the version used during the original model fit! This may lead to instability and it is highly recommended the model be loaded with the exact AutoGluon version it was fit with.')
        logger.warning(f'\tFit Version: {version_fit}')
        logger.warning(f'\tCurrent Version: {version_inference}')
        logger.warning('############################## WARNING ##############################')
        logger.warning('')
    return predictor
def load(cls, path: str) -> "TimeSeriesPredictor":
    """Load an existing ``TimeSeriesPredictor`` from given ``path``.

    Parameters
    ----------
    path: str
        Path where the predictor was saved via
        :meth:`~autogluon.timeseries.TimeSeriesPredictor.save`.

    Returns
    -------
    predictor: TimeSeriesPredictor

    Raises
    ------
    ValueError
        If ``path`` is None or empty.
    """
    if not path:
        raise ValueError("`path` cannot be None or empty in load().")
    path = setup_outputdir(path, warn_if_exist=False)
    logger.info(f"Loading predictor from path {path}")

    loaded_learner = AbstractLearner.load(path)
    predictor: "TimeSeriesPredictor" = load_pkl.load(path=loaded_learner.path + cls.predictor_file_name)
    predictor._learner = loaded_learner
    return predictor
def load_model_base(self):
    """Unpickle and return the model template stored in this trainer's utils directory."""
    template_path = self.path + 'utils' + os.path.sep + 'model_template.pkl'
    return load_pkl.load(path=template_path)
def load_train_data(self) -> TimeSeriesDataFrame:
    """Unpickle and return the cached training split from ``self.path_data``."""
    return load_pkl.load(path=self.path_data + "train.pkl")
def load_val_data(self) -> TimeSeriesDataFrame:
    """Unpickle and return the cached validation split from ``self.path_data``."""
    return load_pkl.load(path=self.path_data + "val.pkl")