    def load_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Loading model from file '{}'".format(folder_path +
                                                          file_name))

        dataIO = DataIO(folder_path=folder_path)
        data_dict = dataIO.load_data(file_name=file_name)

        for attrib_name in data_dict.keys():
            self.__setattr__(attrib_name, data_dict[attrib_name])

        tf.reset_default_graph()
        self.vae = MultiVAE(self.p_dims, lam=0.0)
        (self.saver, self.logits_var, self.loss_var, self.train_op_var,
         self.merged_var) = self.vae.build_graph()

        self.sess = tf.Session()
        self.sess.run(tf.global_variables_initializer())

        self.saver.restore(self.sess, folder_path + file_name + "_session")

        self.summary_writer = tf.summary.FileWriter(
            self.log_dir, graph=tf.get_default_graph())

        self._print("Loading complete")
    def save_model(self, folder_path, file_name=None):

        #https://cv-tricks.com/tensorflow-tutorial/save-restore-tensorflow-models-quick-complete-tutorial/

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        saver = tf.train.Saver()
        saver.save(self.sess, folder_path + file_name + "_session")

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "batch_size": self.batch_size,
            "total_anneal_steps": self.total_anneal_steps,
            "anneal_cap": self.anneal_cap,
            "update_count": self.update_count,
            "p_dims": self.p_dims,
            "batches_per_epoch": self.batches_per_epoch,
            "log_dir": self.log_dir,
            "chkpt_dir": self.chkpt_dir,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #3
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path + file_name))

        data_dict_to_save = {"k": self.k,
                              "embedding_size": self.embedding_size,
                              "learning_rate": self.learning_rate,
                              "decay": self.decay,
                              "batch_size": self.batch_size,
                              # "model_lamda": self.model_lamda,
                              # "model_U": self.model_U,
                              }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save=data_dict_to_save)

        saver = tf.train.Saver()

        saver.save(self.sess, folder_path + file_name + "_session")


        self._print("Saving complete")
def _get_algorithm_metadata_to_print_list(result_folder_path,
                                          algorithm_list,
                                          KNN_similarity_list=None,
                                          ICM_names_list=None,
                                          UCM_names_list=None):
    dataIO = DataIO(folder_path=result_folder_path)

    algorithm_file_name_list = _get_algorithm_file_name_list(
        algorithm_list=algorithm_list,
        KNN_similarity_list=KNN_similarity_list,
        ICM_names_list=ICM_names_list,
        UCM_names_list=UCM_names_list)

    algorithm_metadata_to_print_list = []

    for algorithm_file_dict in algorithm_file_name_list:
        if algorithm_file_dict is None:
            algorithm_metadata_to_print_list.append(None)
            continue

        algorithm_file_name = algorithm_file_dict["algorithm_file_name"]
        search_metadata = None

        if algorithm_file_name is not None:
            try:
                search_metadata = dataIO.load_data(
                    f"{algorithm_file_name}_metadata")
            except FileNotFoundError:
                pass

        algorithm_file_dict["search_metadata"] = search_metadata
        algorithm_metadata_to_print_list.append(algorithm_file_dict)

    return algorithm_metadata_to_print_list
Example #5
    def load_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Loading model from file '{}'".format(folder_path +
                                                          file_name))

        # Reload the attributes dictionary
        dataIO = DataIO(folder_path=folder_path)
        data_dict = dataIO.load_data(file_name=file_name)

        for attrib_name in data_dict.keys():
            self.__setattr__(attrib_name, data_dict[attrib_name])

        self.model = get_model(self.n_users, self.n_items, self.num_factors,
                               [e * self.k for e in self.layers],
                               self.reg_layers, self.reg_mf)
        if self.learner.lower() == "adagrad":
            self.model.compile(optimizer=Adagrad(lr=self.learning_rate),
                               loss="binary_crossentropy")
        elif self.learner.lower() == "rmsprop":
            self.model.compile(optimizer=RMSprop(lr=self.learning_rate),
                               loss="binary_crossentropy")
        elif self.learner.lower() == "adam":
            self.model.compile(optimizer=Adam(lr=self.learning_rate),
                               loss="binary_crossentropy")
        else:
            self.model.compile(optimizer=SGD(lr=self.learning_rate),
                               loss="binary_crossentropy")
        self.model.load_weights(folder_path + file_name + "_weights")

        self._print("Loading complete")
Example #6
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            'layers': self.layers,
            'reg_layers': self.reg_layers,
            'num_negatives': self.num_negatives,
            'learner': self.learner,
            'learning_rate': self.learning_rate,
            'batch_size': self.batch_size,
            'predictive_factors': self.predictive_factors,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        saver = tf.train.Saver()
        saver.save(self.sess, folder_path + file_name + "_session")

        self._print("Saving complete")
    def _split_data_from_original_dataset(self, save_folder_path):

        # Perform k random splits.

        self.FOLD_DATA_SPLITTER_LIST = [None] * self.n_folds

        # If save_folder_path is None use the default; False disables saving entirely.
        # (The original check "is None and not save_folder_path == False" reduces to this.)
        if save_folder_path is None:
            save_folder_path = self._get_default_save_path()

        for fold_index in range(self.n_folds):

            dataSplitter_object_fold = self._split_data_from_original_dataset_fold(
                save_folder_path, fold_index)

            if self.preload_all:
                self.FOLD_DATA_SPLITTER_LIST[
                    fold_index] = dataSplitter_object_fold

        if save_folder_path:
            split_parameters_dict = {
                "n_folds": self.n_folds,
            }

            dataIO = DataIO(folder_path=save_folder_path)

            dataIO.save_data(data_dict_to_save=split_parameters_dict,
                             file_name="split_parameters")

        self._print("Split complete")
Example #8
    def get_parameter_values_for_algorithm(algorithm_file_name):

        experiment_subfolder_to_parameters_dict = {}
        parameters_list = None

        for experiment_subfolder in experiment_subfolder_list:

            try:
                dataIO = DataIO(folder_path=result_folder_path +
                                algorithm_name + "_" + experiment_subfolder +
                                "/")
                search_metadata = dataIO.load_data(algorithm_file_name +
                                                   "_metadata")

                search_metadata = search_metadata["hyperparameters_best"]

                if parameters_list is None:
                    parameters_list = list(search_metadata.keys())
                else:
                    assert set(parameters_list) == set(search_metadata.keys()), \
                        "Inconsistent hyperparameter names: expected {}, found {}".format(
                            parameters_list, list(search_metadata.keys()))

            except (FileNotFoundError, KeyError):
                # Metadata file or "hyperparameters_best" entry missing for this experiment
                search_metadata = None

            experiment_subfolder_to_parameters_dict[
                experiment_subfolder] = search_metadata

        return experiment_subfolder_to_parameters_dict, parameters_list
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            "W_sparse_Cold_itemKNNCBF":
            self.Recommender_cold.itemKNNCBF.W_sparse,
            "norm_scores_Cold": self.Recommender_cold.norm_scores,
            "W_sparse_Cold_P3Alpha": self.Recommender_cold.RP3beta.W_sparse,
            "alpha_Cold": self.Recommender_cold.alpha,
            "W_sparse_Warm_itemKNNCBF":
            self.Recommender_warm.itemKNNCBF.W_sparse,
            "norm_scores_Warm": self.Recommender_warm.norm_scores,
            "W_sparse_Warm_P3Alpha": self.Recommender_warm.RP3beta.W_sparse,
            "alpha_Warm": self.Recommender_warm.alpha,
            "threshold": self.threshold
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #10
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path + file_name))

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save={})

        self._print("Saving complete")
    def save_model(self, folder_path, file_name=None):
        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        print("Saving model in file '{}'".format(folder_path + file_name))

        # save graph weights
        self.fm.save_session(folder_path + file_name)
        # save model params
        data_dict_to_save = {
            'pretrain_flag': self.fm.pretrain_flag,
            'hidden_factor': self.fm.hidden_factor,
            'loss_type': 'log_loss',
            'epoch': self.fm.epoch,
            'batch_size': self.fm.batch_size,
            'learning_rate': self.fm.learning_rate,
            'lamda_bilinear': self.fm.lamda_bilinear,
            'keep': self.fm.keep,
            'optimizer_type': self.fm.optimizer_type,
            'batch_norm': self.fm.batch_norm,
            'permutation': self.fm.permutation,
            'verbose': self.fm.verbose,
            'random_seed': self.fm.random_seed,
        }
        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #12
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        self.mf_recommender.save_model(folder_path,
                                       file_name=file_name + "_warm_users")

        data_dict_to_save = {
            "_cold_user_KNN_model_flag":
            self._cold_user_KNN_model_flag,
            "_cold_user_KNN_estimated_factors_flag":
            self._cold_user_KNN_estimated_factors_flag
        }

        if self._cold_user_KNN_model_flag:
            self._ItemKNNRecommender.save_model(folder_path,
                                                file_name=file_name +
                                                "_cold_users")

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            'learning_rate': self.learning_rate,
            'num_epochs': self.num_epochs,
            'num_negatives': self.num_negatives,
            'dataset_name': self.dataset_name,
            'number_model': self.number_model,
            'plot_model': self.plot_model,
            'current_epoch': self.current_epoch,
            'map_mode': self.map_mode,
            'permutation': self.permutation,
            'verbose': self.verbose,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)
        # Alternative: save the full model instead of only the weights
        # self.model.save(folder_path + file_name + "_keras_model.h5")
        self.model.save_weights(folder_path + file_name + "_keras_model.h5",
                                overwrite=True)

        self._print("Saving complete")
    def load_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Loading model from file '{}'".format(folder_path + file_name))

        dataIO = DataIO(folder_path=folder_path)
        data_dict = dataIO.load_data(file_name=file_name)

        for attrib_name in data_dict.keys():
            self.__setattr__(attrib_name, data_dict[attrib_name])

        self._init_model()

        self.model = get_model(self.n_users, self.n_items, self.path_nums,
                               self.timestamps, self.length, self.layers,
                               self.reg_layes, self.latent_dim, self.reg_latent)

        self.model.compile(optimizer=Adam(lr=self.learning_rate, decay=1e-4),
                           loss='binary_crossentropy')

        self.model.load_weights(folder_path + file_name + "_weights")

        self._print("Loading complete")
Example #15
    def save_model(self, folder_path, file_name=None):

        #https://cv-tricks.com/tensorflow-tutorial/save-restore-tensorflow-models-quick-complete-tutorial/

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "cmn_config_dict": self.cmn_config_clone.get_dict()
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        saver = tf.train.Saver()

        saver.save(self.sess, folder_path + file_name + "_session")

        self._print("Saving complete")
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path + file_name))

        self.model.save_weights(folder_path + file_name + "_weights", overwrite=True)

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "path_nums": self.path_nums,
            "timestamps": self.timestamps,
            "length": self.length,
            "layers": self.layers,
            "reg_layes": self.reg_layes,
            "latent_dim": self.latent_dim,
            "reg_latent": self.reg_latent,
            "learning_rate": self.learning_rate,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #17
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        self.model.save_weights(folder_path + file_name + "_weights",
                                overwrite=True)

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "mf_dim": self.mf_dim,
            "layers": self.layers,
            "reg_layers": self.reg_layers,
            "reg_mf": self.reg_mf,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #18
def load_data_dict_zip(splitted_data_path, file_name_prefix):
    UCM_DICT = {}
    ICM_DICT = {}

    dataIO = DataIO(folder_path=splitted_data_path)

    URM_DICT = dataIO.load_data(file_name=file_name_prefix + "URM_dict")

    if URM_DICT["__UCM_available"]:
        UCM_DICT = dataIO.load_data(file_name=file_name_prefix + "UCM_dict")

    del URM_DICT["__UCM_available"]

    if URM_DICT["__ICM_available"]:
        ICM_DICT = dataIO.load_data(file_name=file_name_prefix + "ICM_dict")

    del URM_DICT["__ICM_available"]

    loaded_data_dict = {
        "URM_DICT": URM_DICT,
        "UCM_DICT": UCM_DICT,
        "ICM_DICT": ICM_DICT,
    }

    return loaded_data_dict
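A writer producing the layout load_data_dict_zip expects would set the availability flags alongside the URM dict. This is a sketch: save_data_dict_zip is a hypothetical counterpart, but the "__UCM_available"/"__ICM_available" keys and file names match the loader above, and DataIO is the same class used throughout these snippets.

def save_data_dict_zip(splitted_data_path, file_name_prefix,
                       URM_DICT, UCM_DICT=None, ICM_DICT=None):
    dataIO = DataIO(folder_path=splitted_data_path)

    URM_DICT = dict(URM_DICT)  # copy so the flags do not leak to the caller
    URM_DICT["__UCM_available"] = UCM_DICT is not None
    URM_DICT["__ICM_available"] = ICM_DICT is not None
    dataIO.save_data(file_name=file_name_prefix + "URM_dict",
                     data_dict_to_save=URM_DICT)

    if UCM_DICT is not None:
        dataIO.save_data(file_name=file_name_prefix + "UCM_dict",
                         data_dict_to_save=UCM_DICT)

    if ICM_DICT is not None:
        dataIO.save_data(file_name=file_name_prefix + "ICM_dict",
                         data_dict_to_save=ICM_DICT)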
Example #19
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        self.model.save_weights(folder_path + file_name + "_weights")

        data_dict_to_save = {
            "n_users": self.n_users,
            "n_items": self.n_items,
            "epochs": self.epochs,
            "batch_size": self.batch_size,
            "num_factors": self.num_factors,
            "layers": self.layers,
            "reg_mf": self.reg_mf,
            "reg_layers": self.reg_layers,
            "num_negatives": self.num_negatives,
            "learning_rate": self.learning_rate,
            "learner": self.learner,
            "k": self.k,
        }

        # Do not change this
        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
    def load_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Loading model from file '{}'".format(folder_path +
                                                          file_name))

        dataIO = DataIO(folder_path=folder_path)
        data_dict = dataIO.load_data(file_name=file_name)

        for attrib_name in data_dict.keys():
            if attrib_name == "W_sparse_Cold_P3Alpha":
                self.Recommender_cold.RP3beta.W_sparse = data_dict[attrib_name]
            elif attrib_name == "W_sparse_Cold_itemKNNCBF":
                self.Recommender_cold.itemKNNCBF.W_sparse = data_dict[
                    attrib_name]
            elif attrib_name == "alpha_Cold":
                self.Recommender_cold.alpha = data_dict[attrib_name]
            elif attrib_name == "norm_scores_Cold":
                self.Recommender_cold.norm_scores = data_dict[attrib_name]
            elif attrib_name == "W_sparse_Warm_P3Alpha":
                self.Recommender_warm.RP3beta.W_sparse = data_dict[attrib_name]
            elif attrib_name == "W_sparse_Warm_itemKNNCBF":
                self.Recommender_warm.itemKNNCBF.W_sparse = data_dict[
                    attrib_name]
            elif attrib_name == "alpha_Warm":
                self.Recommender_warm.alpha = data_dict[attrib_name]
            elif attrib_name == "norm_scores_Warm":
                self.Recommender_warm.norm_scores = data_dict[attrib_name]
            elif attrib_name == "threshold":
                self.threshold = data_dict[attrib_name]

        self._print("Loading complete")
Example #21
    def load_data(self, save_folder_path):

        dataIO = DataIO(folder_path=save_folder_path)

        global_attributes_dict = dataIO.load_data(
            file_name="dataset_global_attributes")

        for attrib_name, attrib_object in global_attributes_dict.items():
            self.__setattr__(attrib_name, attrib_object)

        self.AVAILABLE_URM = dataIO.load_data(file_name="dataset_URM")

        if self._HAS_ICM > 0:
            self.AVAILABLE_ICM = dataIO.load_data(file_name="dataset_ICM")
            self.AVAILABLE_ICM_feature_mapper = dataIO.load_data(
                file_name="dataset_ICM_mappers")

        if self._HAS_UCM > 0:
            self.AVAILABLE_UCM = dataIO.load_data(file_name="dataset_UCM")
            self.AVAILABLE_UCM_feature_mapper = dataIO.load_data(
                file_name="dataset_UCM_mappers")

        if self._HAS_additional_mapper:
            self.dataset_additional_mappers = dataIO.load_data(
                file_name="dataset_additional_mappers")
Example #22
    def _set_search_attributes(
        self,
        recommender_input_args,
        recommender_input_args_last_test,
        metric_to_optimize,
        output_folder_path,
        output_file_name_root,
        resume_from_saved,
        save_metadata,
        save_model,
        evaluate_on_test_each_best_solution,
        n_cases,
    ):

        if save_model not in self._SAVE_MODEL_VALUES:
            raise ValueError(
                "{}: parameter save_model must be in '{}', provided was '{}'.".
                format(self.ALGORITHM_NAME, self._SAVE_MODEL_VALUES,
                       save_model))

        self.output_folder_path = output_folder_path
        self.output_file_name_root = output_file_name_root

        # If directory does not exist, create
        if not os.path.exists(self.output_folder_path):
            os.makedirs(self.output_folder_path)

        self.log_file = open(
            self.output_folder_path + self.output_file_name_root +
            "_{}.txt".format(self.ALGORITHM_NAME),
            "a",
        )

        if save_model == "last" and recommender_input_args_last_test is None:
            self._write_log(
                "{}: parameter save_model is 'last' but no recommender_input_args_last_test provided, saving best model on train data alone."
                .format(self.ALGORITHM_NAME))
            save_model = "best"

        self.recommender_input_args = recommender_input_args
        self.recommender_input_args_last_test = recommender_input_args_last_test
        self.metric_to_optimize = metric_to_optimize
        self.save_model = save_model
        self.resume_from_saved = resume_from_saved
        self.save_metadata = save_metadata
        self.evaluate_on_test_each_best_solution = evaluate_on_test_each_best_solution

        self.model_counter = 0
        self._init_metadata_dict(n_cases=n_cases)

        if self.save_metadata:
            self.dataIO = DataIO(folder_path=self.output_folder_path)
    def saveModel(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        print("{}: Saving model in file '{}'".format(self.RECOMMENDER_NAME, folder_path + file_name))

        data_dict_to_save = {"item_pop": self.item_pop}

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save=data_dict_to_save)

        print("{}: Saving complete".format(self.RECOMMENDER_NAME))
    def _init_factors_saved(self):

        dataIO = DataIO(
            folder_path="SavedModels\\IALS_num_factors=2400_alpha=25\\")
        data_dict = dataIO.load_data(file_name="Jan14_13-52-40")

        for attrib_name in data_dict.keys():
            if attrib_name == "USER_factors":
                self.USER_factors = data_dict[attrib_name]
            elif attrib_name == "ITEM_factors":
                self.ITEM_factors = data_dict[attrib_name]

        self._print("Loading complete")
    def save_model(self, folder_path, file_name = None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Saving model in file '{}'".format(folder_path + file_name))

        data_dict_to_save = {"W_sparse_itemKNNCBF": self.itemKNNCBF.W_sparse, "norm_scores": self.norm_scores,
                             "W_sparse_itemKNNCF": self.itemKNNCF.W_sparse, "alpha": self.alpha}

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name, data_dict_to_save = data_dict_to_save)

        self._print("Saving complete")
Example #26
    def load_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Loading model from file '{}'".format(folder_path +
                                                          file_name))

        # Reload the attributes dictionary
        dataIO = DataIO(folder_path=folder_path)
        data_dict = dataIO.load_data(file_name=file_name)

        for attrib_name in data_dict.keys():
            self.__setattr__(attrib_name, data_dict[attrib_name])

        # score_model = nn_scoremodel((embed_len,), embed_len, score_act=None)
        score_model = inner_prod_scoremodel((self.embed_len, ),
                                            score_rep_norm=False)
        # score_model = fm_scoremodel((embed_len,), score_rep_norm=False, score_act=None)

        loss = max_margin_loss

        self.model = NetworkRS(self.n_users,
                               self.n_items,
                               self.embed_len,
                               score_model,
                               self.topK,
                               self.topK,
                               embed_regularizer=l2(5e-7),
                               directed=self.directed,
                               mem_filt_alpha=self.fliter_theta,
                               mem_agg_alpha=self.aggre_theta,
                               user_mask=None)

        self.model.first_model.load_weights(folder_path + file_name +
                                            "_first_model_weights")
        self.model.second_model.load_weights(folder_path + file_name +
                                             "_second_model_weights")
        self.model.triplet_model.load_weights(folder_path + file_name +
                                              "_triplet_model_weights")

        self.model.triplet_model.compile(loss=loss, optimizer='adam')

        self.batchGenerator = TripletGenerator(self.G_user, self.model,
                                               self.G_ui, self.G_item)

        self.item_rep = self.get_item_rep()
        self.user_rep = self.get_user_rep()

        self._print("Loading complete")
Example #27
    def save_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        logger.info(f"Saving model in file '{folder_path + file_name}'")

        data_dict_to_save = {"item_pop": self.item_pop}

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")
Example #28
    def load_model(self, folder_path, file_name=None):

        if file_name is None:
            file_name = self.RECOMMENDER_NAME

        self._print("Loading model from file '{}'".format(folder_path + file_name))

        dataIO = DataIO(folder_path=folder_path)
        data_dict = dataIO.load_data(file_name=file_name)

        for attrib_name in data_dict.keys():
            self.__setattr__(attrib_name, data_dict[attrib_name])

        self._print("Loading complete")
def run_train_with_early_stopping(dataset_name, URM_train, URM_validation,
                                  UCM_CoupledCF, ICM_CoupledCF,
                                  evaluator_validation, evaluator_test,
                                  metric_to_optimize, result_folder_path,
                                  map_mode):

    if not os.path.exists(result_folder_path):
        os.makedirs(result_folder_path)

    article_hyperparameters = get_hyperparameters_for_dataset(dataset_name)
    article_hyperparameters["map_mode"] = map_mode

    earlystopping_hyperparameters = {
        "validation_every_n": 5,
        "stop_on_validation": True,
        "lower_validations_allowed": 5,
        "evaluator_object": evaluator_validation,
        "validation_metric": metric_to_optimize
    }

    parameterSearch = SearchSingleCase(
        CoupledCF_RecommenderWrapper,
        evaluator_validation=evaluator_validation,
        evaluator_test=evaluator_test)

    recommender_input_args = SearchInputRecommenderArgs(
        CONSTRUCTOR_POSITIONAL_ARGS=[URM_train, UCM_CoupledCF, ICM_CoupledCF],
        FIT_KEYWORD_ARGS=earlystopping_hyperparameters)

    recommender_input_args_last_test = recommender_input_args.copy()
    recommender_input_args_last_test.CONSTRUCTOR_POSITIONAL_ARGS[
        0] = URM_train + URM_validation

    parameterSearch.search(
        recommender_input_args,
        recommender_input_args_last_test=recommender_input_args_last_test,
        fit_hyperparameters_values=article_hyperparameters,
        output_folder_path=result_folder_path,
        output_file_name_root=CoupledCF_RecommenderWrapper.RECOMMENDER_NAME,
        save_model="last",
        resume_from_saved=True,
        evaluate_on_test="last")

    dataIO = DataIO(result_folder_path)
    search_metadata = dataIO.load_data(
        file_name=CoupledCF_RecommenderWrapper.RECOMMENDER_NAME +
        "_metadata.zip")

    return search_metadata
    def _run_epoch(self, num_epoch):

        # fit user factors
        # VV = n_factors x n_factors
        VV = self.ITEM_factors.T.dot(self.ITEM_factors)

        for user_id in self.warm_users:
            # get (positive i.e. non-zero scored) items for user

            start_pos = self.C.indptr[user_id]
            end_pos = self.C.indptr[user_id + 1]

            user_profile = self.C.indices[start_pos:end_pos]
            user_confidence = self.C.data[start_pos:end_pos]

            self.USER_factors[user_id, :] = self._update_row(
                user_profile, user_confidence, self.ITEM_factors, VV)

        # fit item factors
        # UU = n_factors x n_factors
        UU = self.USER_factors.T.dot(self.USER_factors)

        for item_id in self.warm_items:

            start_pos = self.C_csc.indptr[item_id]
            end_pos = self.C_csc.indptr[item_id + 1]

            item_profile = self.C_csc.indices[start_pos:end_pos]
            item_confidence = self.C_csc.data[start_pos:end_pos]

            self.ITEM_factors[item_id, :] = self._update_row(
                item_profile, item_confidence, self.USER_factors, UU)

        folder_path = "SavedModels\\IALS_num_factors=2400_alpha=25\\"
        file_name = datetime.now().strftime('%b%d_%H-%M-%S')

        self._print("Saving model in file '{}'".format(folder_path +
                                                       file_name))

        data_dict_to_save = {
            "USER_factors": self.USER_factors,
            "ITEM_factors": self.ITEM_factors,
        }

        dataIO = DataIO(folder_path=folder_path)
        dataIO.save_data(file_name=file_name,
                         data_dict_to_save=data_dict_to_save)

        self._print("Saving complete")