def __init__(self, batch_size=256):
    """Build the scoring model: dot product of user and item embedding vectors.

    Sets up the Keras model, optimizer, MSE loss / MAE metric, and a
    checkpoint that is restored from ``self.MODEL_DIR`` if one exists.

    Parameters
    ----------
    batch_size : int
        Mini-batch size used during training (default 256).
    """
    self.batch_size = batch_size
    # Sentinel "no best loss yet" value; overwritten by the first real loss.
    self.best_loss = 9999999
    self.losses = {'train': [], 'test': []}

    self.ub = UserBehavior()
    # Obtain the model's input feature tensors.
    user_feature, item_feature = self.get_inputs()
    self.user_embedding, self.item_feature_embedding = self.ub.load_train_vector()

    # Flatten the embeddings to fixed-width vectors.
    # NOTE(review): widths 40 and 43 are hard-coded — they must match what
    # UserBehavior.load_train_vector() produces; confirm if that changes.
    self.user_embedding = tf.keras.layers.Reshape(
        [40], name='user_embedding')(self.user_embedding)
    self.item_feature_embedding = tf.keras.layers.Reshape(
        [43], name="item_feature_embedding")(self.item_feature_embedding)

    # Score = element-wise product summed over the feature axis (a dot product
    # per example).
    inference = tf.keras.layers.Lambda(
        lambda layer: tf.reduce_sum(layer[0] * layer[1], axis=1),
        name="inference")((self.user_embedding, self.item_feature_embedding))
    # Restore a trailing axis of size 1 so the output has shape (batch, 1).
    inference = tf.keras.layers.Lambda(
        lambda layer: tf.expand_dims(layer, axis=1))(inference)

    self.model = tf.keras.Model(inputs=[user_feature, item_feature],
                                outputs=[inference])
    self.model.summary()

    # `lr` is a deprecated alias in tf.keras; `learning_rate` is the
    # supported keyword.
    self.optimizer = tf.keras.optimizers.Adam(learning_rate=0.001)
    # MSE loss: regress the predicted score towards the observed rating.
    self.ComputeLoss = tf.keras.losses.MeanSquaredError()
    self.ComputeMetrics = tf.keras.metrics.MeanAbsoluteError()

    # Create the model directory only if it is missing; an existing directory
    # is deliberately kept so previous checkpoints can be restored below.
    if not tf.io.gfile.exists(self.MODEL_DIR):
        tf.io.gfile.makedirs(self.MODEL_DIR)

    # NOTE(review): train_dir/test_dir are computed but never used in this
    # method — presumably intended for summary writers; verify before removing.
    train_dir = os.path.join(self.MODEL_DIR, 'summaries', 'train')
    test_dir = os.path.join(self.MODEL_DIR, 'summaries', 'eval')
    checkpoint_dir = os.path.join(self.MODEL_DIR, 'checkpoints')
    self.checkpoint_prefix = os.path.join(checkpoint_dir, 'ckpt')
    self.checkpoint = tf.train.Checkpoint(model=self.model,
                                          optimizer=self.optimizer)
    # Restore variables on creation if a checkpoint exists.
    self.checkpoint.restore(tf.train.latest_checkpoint(checkpoint_dir))
    print("finish init")
# NOTE(review): this chunk begins mid-way through a training loop whose header
# is outside the visible region; `avg_loss`, `best_loss`, `network` and
# `config` are bound by that enclosing code.
if avg_loss.result() < best_loss:
    # New best loss: remember it and persist a checkpoint.
    best_loss = avg_loss.result()
    print("best loss = {}".format(best_loss))
    network.checkpoint.save(config.checkpoint_prefix)

def forward(self, xs):
    """Run a forward pass of the underlying Keras model on inputs `xs`.

    Returns the raw model output (no softmax is applied).
    """
    predictions = self.model(xs)
    # logits = tf.nn.softmax(predictions)
    return predictions

if __name__ == '__main__':
    config = RecommenderNetworkConfig()
    ub = UserBehavior()
    # Sentinel "no best loss yet" value, mirroring the network's own tracker.
    best_loss = 9999999
    losses = {'train': [], 'test': []}
    network = RecommenderNetwork(config)
    # Load training features in chunks; the non-chunked loader is kept
    # commented out below for reference.
    # user_embedding, item_feature_embedding, user_item_score = ub.load_train_vector()
    user_embedding_chunks, item_feature_embedding_chunks, user_item_score_chunks = ub.chunk_load_train_vector(
    )
    # model_input_x = []
    # model_input_x.append(user_embedding)
    # model_input_x.append(item_feature_embedding)
    # model_input_x = list(zip(user_embedding, item_feature_embedding))
    # Pair user-embedding and item-feature chunks positionally as model inputs.
    # NOTE(review): the script appears to continue past the end of this chunk.
    model_input_x = list(
        zip(user_embedding_chunks, item_feature_embedding_chunks))