Example #1
    def save_model(self, folder_path, file_name=None):

        #https://cv-tricks.com/tensorflow-tutorial/save-restore-tensorflow-models-quick-complete-tutorial/

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "cmn_config_dict": self.cmn_config_clone.get_dict()
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        saver = tf.train.Saver()

        saver.save(self.sess, folder_path + file_name + "_session")

        self._print("Saving complete")
Example #2
    def save_model(self, folder_path, file_name = None):


        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path + file_name))

        data_dict_to_save = {"k": self.k,
                              "embedding_size": self.embedding_size,
                              "learning_rate": self.learning_rate,
                              "decay": self.decay,
                              "batch_size": self.batch_size,
                              # "model_lamda": self.model_lamda,
                              # "model_U": self.model_U,
                              }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save = data_dict_to_save)

        saver = tf.train.Saver()

        saver.save(self.sess, folder_path + file_name + "_session")


        self._print("Saving complete")
Example #3
    def save_model(self, folder_path, file_name=None):
        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        print("Saving model in file '{}'".format(folder_path + file_name))

        # save graph weights
        self.fm.save_session(folder_path + file_name)
        # save model params
        data_dict_to_save = {
            'pretrain_flag': self.fm.pretrain_flag,
            'hidden_factor': self.fm.hidden_factor,
            'loss_type': 'log_loss',
            'epoch': self.fm.epoch,
            'batch_size': self.fm.batch_size,
            'learning_rate': self.fm.learning_rate,
            'lamda_bilinear': self.fm.lamda_bilinear,
            'keep': self.fm.keep,
            'optimizer_type': self.fm.optimizer_type,
            'batch_norm': self.fm.batch_norm,
            'permutation': self.fm.permutation,
            'verbose': self.fm.verbose,
            'random_seed': self.fm.random_seed,
        }
        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #4
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            "W_sparse_Cold_itemKNNCBF": self.Recommender_cold.itemKNNCBF.W_sparse,
            "norm_scores_Cold": self.Recommender_cold.norm_scores,
            "W_sparse_Cold_P3Alpha": self.Recommender_cold.RP3beta.W_sparse,
            "alpha_Cold": self.Recommender_cold.alpha,
            "W_sparse_Warm_itemKNNCBF": self.Recommender_warm.itemKNNCBF.W_sparse,
            "norm_scores_Warm": self.Recommender_warm.norm_scores,
            "W_sparse_Warm_P3Alpha": self.Recommender_warm.RP3beta.W_sparse,
            "alpha_Warm": self.Recommender_warm.alpha,
            "threshold": self.threshold
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #5
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            'layers': self.layers,
            'reg_layers': self.reg_layers,
            'num_negatives': self.num_negatives,
            'learner': self.learner,
            'learning_rate': self.learning_rate,
            'batch_size': self.batch_size,
            'predictive_factors': self.predictive_factors,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        saver = tf.train.Saver()
        saver.save(self.sess, folder_path + file_name + "_session")

        self._print("Saving complete")
Example #6
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        self.mf_recommender.save_model(folder_path,
                                       file_name=file_name + "_warm_users")

        data_dict_to_save = {
            "_cold_user_KNN_model_flag": self._cold_user_KNN_model_flag,
            "_cold_user_KNN_estimated_factors_flag": self._cold_user_KNN_estimated_factors_flag
        }

        if self._cold_user_KNN_model_flag:
            self._ItemKNNRecommender.save_model(folder_path,
                                                file_name=file_name +
                                                "_cold_users")

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #7
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            'learning_rate': self.learning_rate,
            'num_epochs': self.num_epochs,
            'num_negatives': self.num_negatives,
            'dataset_name': self.dataset_name,
            'number_model': self.number_model,
            'plot_model': self.plot_model,
            'current_epoch': self.current_epoch,
            'map_mode': self.map_mode,
            'permutation': self.permutation,
            'verbose': self.verbose,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)
        #
        # self.model.save(folder_path + file_name + "_keras_model.h5")
        self.model.save_weights(folder_path + file_name + "_keras_model.h5",
                                overwrite=True)

        self._print("Saving complete")
Example #8
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        self.model.save_weights(folder_path + file_name + "_weights",
                                overwrite=True)

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "mf_dim": self.mf_dim,
            "layers": self.layers,
            "reg_layers": self.reg_layers,
            "reg_mf": self.reg_mf,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #9
    def save_model(self, folder_path, file_name = None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path + file_name))

        self.model.save_weights(folder_path + file_name + "_weights", overwrite=True)

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "path_nums": self.path_nums,
            "timestamps": self.timestamps,
            "length": self.length,
            "layers": self.layers,
            "reg_layes": self.reg_layes,
            "latent_dim": self.latent_dim,
            "reg_latent": self.reg_latent,
            "learning_rate": self.learning_rate,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save = data_dict_to_save)


        self._print("Saving complete")
Example #10
    def _split_data_from_original_dataset(self, save_folder_path):

        # Perform k random splits.

        self.FOLD_DATA_SPLITTER_LIST = [None] * self.n_folds

        # If save_folder_path is None use default
        if save_folder_path is None and not save_folder_path == False:
            save_folder_path = self._get_default_save_path()

        for fold_index in range(self.n_folds):

            dataSplitter_object_fold = self._split_data_from_original_dataset_fold(
                save_folder_path, fold_index)

            if self.preload_all:
                self.FOLD_DATA_SPLITTER_LIST[fold_index] = dataSplitter_object_fold

        if save_folder_path:
            split_parameters_dict = {
                "n_folds": self.n_folds,
            }

            dataIO = DataIO(folder_path=save_folder_path)

            dataIO.save_data(data_dict_to_save=split_parameters_dict,
                             file_name="split_parameters")

        self._print("Split complete")
Example #11
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        self.model.save_weights(folder_path + file_name + "_weights")

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "epochs": self.epochs,
            "batch_size": self.batch_size,
            "num_factors": self.num_factors,
            "layers": self.layers,
            "reg_mf": self.reg_mf,
            "reg_layers": self.reg_layers,
            "num_negatives": self.num_negatives,
            "learning_rate": self.learning_rate,
            "learner": self.learner,
            "k": self.k,
        }

        # Do not change this
        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #12
    def save_model(self, folder_path, file_name = None):
        if file_name is None:
            file_name = self.RECOMMENDER_NAME
        self._print("Saving model in file '{}'".format(folder_path + file_name))
        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save = {})
        self._print("Saving complete")
Example #13
    def save_model(self, folder_path, file_name=None):

        #https://cv-tricks.com/tensorflow-tutorial/save-restore-tensorflow-models-quick-complete-tutorial/

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        saver = tf.train.Saver()
        saver.save(self.sess, folder_path + file_name + "_session")

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "batch_size": self.batch_size,
            "total_anneal_steps": self.total_anneal_steps,
            "anneal_cap": self.anneal_cap,
            "update_count": self.update_count,
            "p_dims": self.p_dims,
            "batches_per_epoch": self.batches_per_epoch,
            "log_dir": self.log_dir,
            "chkpt_dir": self.chkpt_dir,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #14
    def saveModel(self, folder_path, file_name = None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        print("{}: Saving model in file '{}'".format(self.RECOMMENDER_NAME, folder_path + file_name))

        data_dict_to_save = {"item_pop": self.item_pop}

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save = data_dict_to_save)

        print("{}: Saving complete".format(self.RECOMMENDER_NAME))
Example #15
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        logger.info(f"Saving model in file '{folder_path + file_name}'")

        data_dict_to_save = {"item_pop": self.item_pop}

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #16
    def save_model(self, folder_path, file_name = None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path + file_name))

        data_dict_to_save = {"W_sparse_itemKNNCBF": self.itemKNNCBF.W_sparse, "norm_scores": self.norm_scores,
                             "W_sparse_itemKNNCF": self.itemKNNCF.W_sparse, "alpha": self.alpha}

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save = data_dict_to_save)

        self._print("Saving complete")
Example #17
    def _run_epoch(self, num_epoch):

        # fit user factors
        # VV = n_factors x n_factors
        VV = self.ITEM_factors.T.dot(self.ITEM_factors)

        for user_id in self.warm_users:
            # get (positive i.e. non-zero scored) items for user

            start_pos = self.C.indptr[user_id]
            end_pos = self.C.indptr[user_id + 1]

            user_profile = self.C.indices[start_pos:end_pos]
            user_confidence = self.C.data[start_pos:end_pos]

            self.USER_factors[user_id, :] = self._update_row(
                user_profile, user_confidence, self.ITEM_factors, VV)

        # fit item factors
        # UU = n_factors x n_factors
        UU = self.USER_factors.T.dot(self.USER_factors)

        for item_id in self.warm_items:

            start_pos = self.C_csc.indptr[item_id]
            end_pos = self.C_csc.indptr[item_id + 1]

            item_profile = self.C_csc.indices[start_pos:end_pos]
            item_confidence = self.C_csc.data[start_pos:end_pos]

            self.ITEM_factors[item_id, :] = self._update_row(
                item_profile, item_confidence, self.USER_factors, UU)

        folder_path = "SavedModels\\IALS_num_factors=2400_alpha=25\\"
        file_name = datetime.now().strftime('%b%d_%H-%M-%S')

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            "USER_factors": self.USER_factors,
            "ITEM_factors": self.ITEM_factors,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #18
def save_data_dict_zip(URM_DICT, ICM_DICT, splitted_data_path,
                       file_name_prefix):

    dataIO = DataIO(folder_path=splitted_data_path)

    URM_DICT["__ICM_available"] = len(ICM_DICT) > 0

    dataIO.save_data(data_dict_to_save=URM_DICT,
                     file_name=file_name_prefix + "URM_dict")

    del URM_DICT["__ICM_available"]

    if len(ICM_DICT) > 0:
        dataIO.save_data(data_dict_to_save=ICM_DICT,
                         file_name=file_name_prefix + "ICM_dict")
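A symmetric loader is not shown on this page, but it can be sketched from save_data_dict_zip above: read the URM dict back, pop the "__ICM_available" flag that the saver injects, and load the ICM dict only if it was written. The function name and return convention below are assumptions for illustration.

def load_data_dict_zip(splitted_data_path, file_name_prefix):
    # Hypothetical counterpart to save_data_dict_zip (sketch only); assumes
    # DataIO.load_data returns the dict that was saved.
    dataIO = DataIO(folder_path=splitted_data_path)

    URM_DICT = dataIO.load_data(file_name=file_name_prefix + "URM_dict")

    # The flag was injected by save_data_dict_zip; strip it before returning
    ICM_available = URM_DICT.pop("__ICM_available")

    ICM_DICT = {}
    if ICM_available:
        ICM_DICT = dataIO.load_data(file_name=file_name_prefix + "ICM_dict")

    return URM_DICT, ICM_DICT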
Example #19
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {"Hybrid Ratings": self.get_expected_ratings()}

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #20
def run_evaluation_ablation(recommender_class, recommender_input_args,
                            evaluator_test, input_folder_path,
                            result_folder_path, map_mode):

    recommender_object = recommender_class(
        *recommender_input_args.CONSTRUCTOR_POSITIONAL_ARGS)
    file_name_input = recommender_object.RECOMMENDER_NAME + "_best_model_last"
    file_name_output = recommender_object.RECOMMENDER_NAME

    if os.path.exists(result_folder_path + file_name_output + "_metadata.zip"):
        return

    result_folder_path_temp = result_folder_path + "__temp_model/"

    # If directory does not exist, create
    if not os.path.exists(result_folder_path_temp):
        os.makedirs(result_folder_path_temp)

    recommender_object.load_model(input_folder_path, file_name_input)
    recommender_object.save_model(result_folder_path_temp, file_name_output)

    # Alter saved object to force in the desired map mode
    dataIO = DataIO(folder_path=result_folder_path_temp)
    data_dict = dataIO.load_data(file_name=file_name_output)

    data_dict["map_mode"] = map_mode
    dataIO.save_data(file_name=file_name_output, data_dict_to_save=data_dict)

    recommender_object = recommender_class(
        *recommender_input_args.CONSTRUCTOR_POSITIONAL_ARGS)
    recommender_object.load_model(result_folder_path_temp,
                                  file_name=file_name_output)

    results_dict, results_run_string = evaluator_test.evaluateRecommender(
        recommender_object)

    shutil.rmtree(result_folder_path_temp, ignore_errors=True)

    result_file = open(result_folder_path + file_name_output + ".txt", "w")
    result_file.write(results_run_string)
    result_file.close()

    results_dict = {"result_on_last": results_dict}

    dataIO = DataIO(folder_path=result_folder_path)
    dataIO.save_data(file_name=file_name_output + "_metadata",
                     data_dict_to_save=results_dict)
Example #21
def run_permutation_pretrained_FM(URM_train_full, CFM_data_class_full, pretrained_model_folder_path, result_folder_path, permutation_index, permutation):

    result_folder_path_permutation = result_folder_path + "{}/{}_{}/".format("FM", "FM", permutation_index)

    # Read the pretraining data and put the permutation in it
    recommender_object = FM_Wrapper(URM_train_full, CFM_data_class_full)
    file_name_input = recommender_object.RECOMMENDER_NAME + "_best_model_last"
    file_name_output = recommender_object.RECOMMENDER_NAME

    if os.path.exists(result_folder_path_permutation + file_name_output + "_metadata.zip"):
        return


    result_folder_path_temp = result_folder_path_permutation + "__temp_model/"

    # If directory does not exist, create
    if not os.path.exists(result_folder_path_temp):
        os.makedirs(result_folder_path_temp)

    recommender_object.load_model(pretrained_model_folder_path, file_name_input)
    recommender_object.save_model(result_folder_path_temp, file_name_output)

    # Alter saved object to force in the desired permutation
    dataIO = DataIO(folder_path = result_folder_path_temp)
    data_dict = dataIO.load_data(file_name = file_name_output)

    data_dict["permutation"] = permutation
    dataIO.save_data(file_name = file_name_output, data_dict_to_save = data_dict)


    recommender_object = FM_Wrapper(URM_train_full, CFM_data_class_full)
    recommender_object.load_model(result_folder_path_temp,
                                  file_name=file_name_output)

    results_dict, results_run_string = evaluator_test.evaluateRecommender(recommender_object)

    shutil.rmtree(result_folder_path_temp, ignore_errors=True)

    result_file = open(result_folder_path_permutation + file_name_output + ".txt", "w")
    result_file.write(results_run_string)
    result_file.close()

    results_dict = {"result_on_last": results_dict}

    dataIO = DataIO(folder_path = result_folder_path_permutation)
    dataIO.save_data(file_name = file_name_output + "_metadata",
                     data_dict_to_save = results_dict)
Example #22
    def save_model(self, folder_path, file_name = None):


        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path + file_name))

        data_dict_to_save = {'_args_{}'.format(key):value for (key,value) in vars(self.args).items()}

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save = data_dict_to_save)

        saver = ConvNCF.tf.train.Saver()
        saver.save(ConvNCF._sess, folder_path + file_name + "_session")

        self._print("Saving complete")
Example #23
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            "Item_Factors_svd": self.svd.ITEM_factors,
            "User_Factors_svd": self.svd.USER_factors,
            "norm_scores": self.norm_scores,
            "W_sparse_P3Alpha": self.P3alpha.W_sparse,
            "alpha": self.alpha
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #24
    def _save_split(self, save_folder_path):

        if save_folder_path:

            if self.allow_cold_users:
                allow_cold_users_suffix = "allow_cold_users"

            else:
                allow_cold_users_suffix = "only_warm_users"

            if self.use_validation_set:
                validation_set_suffix = "use_validation_set"
            else:
                validation_set_suffix = "no_validation_set"

            name_suffix = "_{}_{}".format(allow_cold_users_suffix,
                                          validation_set_suffix)

            split_parameters_dict = {
                "k_out_value": self.k_out_value,
                "allow_cold_users": self.allow_cold_users
            }

            dataIO = DataIO(folder_path=save_folder_path)

            dataIO.save_data(data_dict_to_save=split_parameters_dict,
                             file_name="split_parameters" + name_suffix)

            dataIO.save_data(data_dict_to_save=self.SPLIT_GLOBAL_MAPPER_DICT,
                             file_name="split_mappers" + name_suffix)

            dataIO.save_data(data_dict_to_save=self.SPLIT_URM_DICT,
                             file_name="split_URM" + name_suffix)

            if len(self.dataReader_object.get_loaded_ICM_names()) > 0:
                dataIO.save_data(data_dict_to_save=self.SPLIT_ICM_DICT,
                                 file_name="split_ICM" + name_suffix)

                dataIO.save_data(data_dict_to_save=self.SPLIT_ICM_MAPPER_DICT,
                                 file_name="split_ICM_mappers" + name_suffix)
Example #25
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            "USER_factors": self.USER_factors,
            "ITEM_factors": self.ITEM_factors,
            "use_bias": self.use_bias,
            "_cold_user_mask": self._cold_user_mask,
            "_cold_user_KNN_model_flag": self._cold_user_KNN_model_flag,
            "_cold_user_KNN_estimated_factors_flag": self._cold_user_KNN_estimated_factors_flag
        }

        if self.use_bias:
            data_dict_to_save["ITEM_bias"] = self.ITEM_bias
            data_dict_to_save["USER_bias"] = self.USER_bias
            data_dict_to_save["GLOBAL_bias"] = self.GLOBAL_bias

        if self._cold_user_KNN_model_flag:
            data_dict_to_save["_ItemKNNRecommender_W_sparse"] = self._ItemKNNRecommender.W_sparse
            data_dict_to_save["_ItemKNNRecommender_topK"] = self._ItemKNNRecommender_topK

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #26
    def saveModel(self, folder_path, file_name = None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        print("{}: Saving model in file '{}'".format(self.RECOMMENDER_NAME, folder_path + file_name))

        data_dict_to_save = {"USER_factors": self.USER_factors,
                              "ITEM_factors": self.ITEM_factors,
                              "use_bias": self.use_bias,
                              "_cold_user_mask": self._cold_user_mask}

        if self.use_bias:
            data_dict_to_save["ITEM_bias"] = self.ITEM_bias
            data_dict_to_save["USER_bias"] = self.USER_bias
            data_dict_to_save["GLOBAL_bias"] = self.GLOBAL_bias

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save = data_dict_to_save)


        print("{}: Saving complete".format(self.RECOMMENDER_NAME, folder_path + file_name))
Example #27
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            # 'global_var': (#MF_BPR._user_input,
            #                 #MF_BPR._item_input_pos,
            #                 #MF_BPR._batch_size,
            #                 #MF_BPR._index,
            #                 #MF_BPR._model,
            #                 #MF_BPR._dataset,
            #                 #MF_BPR._K,
            #                 #MF_BPR._feed_dict,
            #                 #MF_BPR._output,
            #                 # MF_BPR._exclude_gtItem,
            #                 MF_BPR._user_exclude_validation
            #                 ),
            **{
                '_args_{}'.format(key): value
                for (key, value) in vars(self.args).items()
            }
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self.model_GMF.saveParams(self.sess, file_name + "_latent_factors",
                                  self.args)

        # saver = MF_BPR.tf.train.Saver()
        # saver.save(MF_BPR._sess, folder_path + file_name + "_session")

        self._print("Saving complete")
Example #28
    def save_data(self, save_folder_path):

        dataIO = DataIO(folder_path = save_folder_path)
        
        global_attributes_dict = {
            "DATASET_NAME": self.DATASET_NAME
        }

        dataIO.save_data(file_name = "dataset_global_attributes", data_dict_to_save = global_attributes_dict)

        dataIO.save_data(data_dict_to_save = self.URM_train_av, file_name = "dataset_URM_train")

        dataIO.save_data(data_dict_to_save = self.URM_validation_av, file_name = "dataset_URM_validation")

        dataIO.save_data(data_dict_to_save = self.URM_test_av, file_name = "dataset_URM_test")
Example #29
    def _save_dataset(self, save_folder_path):

        dataIO = DataIO(folder_path=save_folder_path)

        dataIO.save_data(data_dict_to_save=self._LOADED_GLOBAL_MAPPER_DICT,
                         file_name="dataset_global_mappers")

        dataIO.save_data(data_dict_to_save=self._LOADED_URM_DICT,
                         file_name="dataset_URM")

        if len(self.get_loaded_ICM_names()) > 0:
            dataIO.save_data(data_dict_to_save=self._LOADED_ICM_DICT,
                             file_name="dataset_ICM")

            dataIO.save_data(data_dict_to_save=self._LOADED_ICM_MAPPER_DICT,
                             file_name="dataset_ICM_mappers")
Example #30
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        self.model.first_model.save_weights(folder_path + file_name +
                                            "_first_model_weights")
        self.model.second_model.save_weights(folder_path + file_name +
                                             "_second_model_weights")
        self.model.triplet_model.save_weights(folder_path + file_name +
                                              "_triplet_model_weights")

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "embed_len": self.embed_len,
            "topK": self.topK,
            "fliter_theta": self.fliter_theta,
            "aggre_theta": self.aggre_theta,
            "batch_size": self.batch_size,
            "samples": self.samples,
            "margin": self.margin,
            "epochs": self.epochs,
            "iter_without_att": self.iter_without_att,
            "directed": self.directed,
        }

        # Do not change this
        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")