def load_model(self, folder_path, file_name=None, is_earlystopping_format=False):
    if file_name is None:
        file_name = self.RECOMMENDER_NAME

    self._print("Loading model from file '{}'".format(folder_path + file_name))

    # The regular save format is a zip archive; the earlystopping format
    # leaves the session files already unpacked on disk.
    if not is_earlystopping_format:
        shutil.unpack_archive(folder_path + file_name + ".zip",
                              folder_path + file_name + "/",
                              "zip")

    dataIO = DataIO(folder_path=folder_path + file_name + "/")
    data_dict = dataIO.load_data(file_name="fit_attributes")

    for attrib_name in data_dict.keys():
        self.__setattr__(attrib_name, data_dict[attrib_name])

    # Rebuild the TensorFlow graph with the restored architecture, then
    # load the session variables from the saved checkpoint.
    tf.compat.v1.reset_default_graph()

    q_dims = self.p_dims[::-1]
    self.vae = _MultVAE_original(self.p_dims, q_dims=q_dims,
                                 lr=self.learning_rate, lam=self.l2_reg,
                                 random_seed=98765)

    self.saver, self.logits_var, self.loss_var, self.train_op_var, self.merged_var = self.vae.build_graph()

    self.sess = tf.compat.v1.Session()
    self.sess.run(tf.compat.v1.global_variables_initializer())
    self.saver.restore(self.sess, folder_path + file_name + "/session")

    # self.summary_writer = tf.compat.v1.summary.FileWriter(self.log_dir, graph=tf.compat.v1.get_default_graph())

    # Remove the temporary unpacked folder once the session is restored
    shutil.rmtree(folder_path + file_name + "/", ignore_errors=True)

    self._print("Loading complete")
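# Hedged usage sketch for the loader above. "MultVAERecommender" as the
# owning class and the "saved_models/" path are assumptions inferred from
# this method, not confirmed API; URM_train is any scipy.sparse user-item
# matrix the class constructor accepts.
recommender = MultVAERecommender(URM_train)
recommender.load_model("saved_models/", file_name="MultVAE_best")

# The earlystopping format skips the zip round trip, e.g. when the training
# loop saved the best session files directly on disk:
recommender.load_model("saved_models/", file_name="MultVAE_best",
                       is_earlystopping_format=True)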
def load_data(self, save_folder_path):
    dataIO = DataIO(folder_path=save_folder_path)

    global_attributes_dict = dataIO.load_data(file_name="dataset_global_attributes")

    for attrib_name, attrib_object in global_attributes_dict.items():
        self.__setattr__(attrib_name, attrib_object)

    self.AVAILABLE_URM = dataIO.load_data(file_name="dataset_URM")

    if self._HAS_ICM:
        self.AVAILABLE_ICM = dataIO.load_data(file_name="dataset_ICM")
        self.AVAILABLE_ICM_feature_mapper = dataIO.load_data(file_name="dataset_ICM_mappers")

    if self._HAS_UCM:
        self.AVAILABLE_UCM = dataIO.load_data(file_name="dataset_UCM")
        self.AVAILABLE_UCM_feature_mapper = dataIO.load_data(file_name="dataset_UCM_mappers")

    if self._HAS_additional_mapper:
        self.dataset_additional_mappers = dataIO.load_data(file_name="dataset_additional_mappers")
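# Hedged round-trip sketch for the Dataset loader above: the "Dataset"
# class name, its no-argument constructor, the folder path, and the
# "URM_all" dictionary key are all assumptions, not confirmed API.
dataset = Dataset()
dataset.load_data("processed_data/Movielens1M/")
URM_all = dataset.AVAILABLE_URM["URM_all"]  # key name is an assumption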
def load_model(self, folder_path, file_name=None):
    if file_name is None:
        file_name = self.RECOMMENDER_NAME

    self._print("Loading model from file '{}'".format(folder_path + file_name))

    dataIO = DataIO(folder_path=folder_path)
    data_dict = dataIO.load_data(file_name=file_name)

    for attrib_name in data_dict.keys():
        self.__setattr__(attrib_name, data_dict[attrib_name])

    self._print("Loading complete")
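# Round-trip sketch for the generic loader above, using an illustrative
# "ItemKNNCFRecommender" subclass; the save_model signature is assumed to
# mirror load_model and store the fit attributes under the recommender's
# name via the same DataIO format.
recommender = ItemKNNCFRecommender(URM_train)
recommender.fit(topK=50)
recommender.save_model("saved_models/", file_name=recommender.RECOMMENDER_NAME)

loaded = ItemKNNCFRecommender(URM_train)
loaded.load_model("saved_models/")  # file_name defaults to RECOMMENDER_NAME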
def _load_previously_built_split_and_attributes(self, save_folder_path):
    """
    Loads a previously built split: the URM, ICM and UCM dictionaries,
    their mappers, and the split parameters.
    """

    if self.allow_cold_users:
        allow_cold_users_suffix = "allow_cold_users"
    else:
        allow_cold_users_suffix = "only_warm_users"

    if self.user_wise:
        user_wise_string = "user_wise"
    else:
        user_wise_string = "global_sample"

    # File names encode the split configuration so incompatible splits
    # saved in the same folder are not mixed up.
    name_suffix = "_{}_{}".format(allow_cold_users_suffix, user_wise_string)

    dataIO = DataIO(folder_path=save_folder_path)

    split_parameters_dict = dataIO.load_data(file_name="split_parameters" + name_suffix)

    for attrib_name in split_parameters_dict.keys():
        self.__setattr__(attrib_name, split_parameters_dict[attrib_name])

    self.SPLIT_GLOBAL_MAPPER_DICT = dataIO.load_data(file_name="split_mappers" + name_suffix)

    self.SPLIT_URM_DICT = dataIO.load_data(file_name="split_URM" + name_suffix)

    if len(self.dataReader_object.get_loaded_ICM_names()) > 0:
        self.SPLIT_ICM_DICT = dataIO.load_data(file_name="split_ICM" + name_suffix)
        self.SPLIT_ICM_MAPPER_DICT = dataIO.load_data(file_name="split_ICM_mappers" + name_suffix)

    if len(self.dataReader_object.get_loaded_UCM_names()) > 0:
        self.SPLIT_UCM_DICT = dataIO.load_data(file_name="split_UCM" + name_suffix)
        self.SPLIT_UCM_MAPPER_DICT = dataIO.load_data(file_name="split_UCM_mappers" + name_suffix)
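# Hedged sketch of how a splitter would typically guard the cache load
# above: try the previously built split first and rebuild it on a miss.
# "_split_data_from_original_dataset" is an assumed builder-method name.
try:
    self._load_previously_built_split_and_attributes(save_folder_path)
except FileNotFoundError:
    self._split_data_from_original_dataset(save_folder_path)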