def deserialize(
    cls, path: Path, ctx: Optional[mx.Context] = None
) -> "NBEATSEnsemblePredictor":
    """Reconstruct an ensemble predictor previously written by ``serialize``.

    Parameters
    ----------
    path
        Directory containing ``parameters.json`` plus one
        ``predictor_<index>`` sub-folder per ensemble member.
    ctx
        MXNet context to load the member predictors onto; forwarded to
        ``RepresentableBlockPredictor.deserialize``.

    Returns
    -------
    The restored ensemble predictor.
    """
    # Restore the constructor parameters saved alongside the predictors.
    with (path / "parameters.json").open("r") as fp:
        parameters = load_json(fp.read())

    # Each member was serialized into its own zero-padded sub-folder
    # ("predictor_0", "predictor_1", ...); rebuild the exact list of folder
    # names so nothing else under `path` is picked up by mistake.
    num_predictors = parameters["num_predictors"]
    num_digits = len(str(num_predictors))
    predictor_locations = [
        f"predictor_{str(index).zfill(num_digits)}"
        for index in range(num_predictors)
    ]

    # Deserialize the member predictors individually.
    predictors = [
        RepresentableBlockPredictor.deserialize(path / sub_dir, ctx)
        for sub_dir in predictor_locations
    ]

    # Fix: use `cls` instead of the hard-coded class name so that
    # subclasses deserialize into their own type (the original ignored
    # the `cls` argument entirely).
    return cls(
        prediction_length=parameters["prediction_length"],
        freq=parameters["freq"],
        predictors=predictors,
        aggregation_method=parameters["aggregation_method"],
    )
def init_model():
    """Train a new DeepAR predictor or load a previously serialized one.

    Relies on module-level ``args`` (parsed CLI options) and
    ``training_data``. When ``args.train`` is set, trains an estimator and
    serializes the resulting predictor under ``models/``; otherwise
    deserializes the predictor stored there.

    Returns
    -------
    The ready-to-use predictor.
    """
    context = 'gpu' if args.gpu else 'cpu'

    if args.train:
        # Fix: the original computed `epochs` from args but never passed it
        # to the Trainer (the old TODO flagged this). Only override the
        # trainer's default epoch count when the user explicitly set one.
        trainer_kwargs = {"ctx": context}
        if args.epochs is not None:
            trainer_kwargs["epochs"] = args.epochs
        my_trainer = Trainer(**trainer_kwargs)

        estimator = DeepAREstimator(
            freq="5min",
            prediction_length=args.prediction,
            trainer=my_trainer,
        )
        predictor = estimator.train(training_data=training_data)
        predictor.serialize(Path("models/"))
    else:
        predictor = RepresentableBlockPredictor.deserialize(Path("models/"))
        # NOTE(review): the deserialized predictor is pinned to the CPU even
        # when --gpu was passed — confirm this is intentional.
        predictor.ctx = mx.Context('cpu')

    return predictor