Example #1
0
 def config_save(self, config_file=None):
     """
     Persist all configs to a JSON file.

     :param config_file: destination path; when falsy, a default path under
            DEFAULT_CONFIG_DIR named "<name>_<time>.json" is used.
     :return: the path the config was actually written to.
     """
     if not config_file:
         default_name = "{}_{}.json".format(self.name, self.time)
         config_file = os.path.join(DEFAULT_CONFIG_DIR, default_name)
     save_config(config_file, self.config, replace=True)
     return config_file
Example #2
0
 def save(self, file):
     """
     Save the pipeline into a directory: feature transformers, model
     weights and the trial config all go under *file*.

     :param file: directory to save into; created (including any missing
            parent directories) if it does not already exist.
     :return: None
     """
     # makedirs with exist_ok avoids the check-then-create race of the old
     # isdir/mkdir pair and also creates missing parents, which bare
     # os.mkdir cannot do.
     os.makedirs(file, exist_ok=True)
     model_path = os.path.join(file, "weights_tune.h5")
     config_path = os.path.join(file, "all_config.json")
     self.feature_transformers.save(config_path, replace=True)
     self.model.save(model_path, config_path)
     # TODO(review): check if ** expansion is needed when passing self.config
     save_config(config_path, self.config)
Example #3
0
 def save(self, file_path, replace=False):
     """
     Serialize the feature-tool internal state plus its initialization args.

     Some variables (the fitted StandardScaler statistics) only exist after
     fit_transform, so saving the init config alone is not enough.

     :param file_path: the file to write to.
     :param replace: whether to overwrite an existing file.
     :return: None
     """
     # fitted StandardScaler() statistics
     scaler_state = {
         "mean": self.scaler.mean_.tolist(),
         "scale": self.scaler.scale_.tolist(),
     }
     # constructor arguments needed to rebuild the transformer
     init_args = {
         "future_seq_len": self.future_seq_len,
         "dt_col": self.dt_col,
         "target_col": self.target_col,
         "extra_features_col": self.extra_features_col,
         "drop_missing": self.drop_missing,
     }
     payload = dict(scaler_state)
     payload.update(init_args)
     save_config(file_path, payload, replace=replace)
Example #4
0
    def save(self, model_path, config_path):
        """
        Save the model weights and its hyper-parameter configuration.

        :param model_path: file path for the model weight checkpoint
               (written via self.model.save_weights).
        :param config_path: file path for the JSON config written by
               save_config.
        :return: None
        """
        self.model.save_weights(model_path)
        config_to_save = {
            "cnn_height": self.cnn_height,
            "long_num": self.long_num,
            "time_step": self.time_step,
            "ar_window": self.ar_window,
            "cnn_hid_size": self.cnn_hid_size,
            "rnn_hid_sizes": self.rnn_hid_sizes,
            "cnn_dropout": self.cnn_dropout,
            "rnn_dropout": self.rnn_dropout,
            "lr": self.lr,
            "batch_size": self.batch_size,
            # for fit eval
            "epochs": self.epochs,
            # TODO: cannot serialize metrics unless all elements are str
            "metrics": self.metrics,
            "mc": self.mc,
            "feature_num": self.feature_num,
            "output_dim": self.output_dim,
            "loss": self.loss
        }
        # Guard against drift between the serialized keys and the key set
        # the rest of the class expects (self.saved_configs).
        assert set(config_to_save.keys()) == self.saved_configs, \
            "The keys in config_to_save is not the same as self.saved_configs." \
            "Please keep them consistent"
        save_config(config_path, config_to_save)
 def save(self, file_path, replace=False):
     """
     Persist the selected feature/target column names to *file_path*.

     :param file_path: destination file.
     :param replace: overwrite an existing file when True.
     :return: None
     """
     columns = {"feature_cols": self.feature_cols,
                "target_col": self.target_col}
     save_config(file_path, columns, replace=replace)