def set_graph(self, embedding_matrix):
    with self.graph.as_default():
        # tf Graph input
        tf.set_random_seed(1)
        self.x = tf.placeholder(tf.int32, shape=(self.cfg.bsize, self.cfg.max_seq_len), name="x")
        self.y = tf.placeholder(tf.float32, shape=(self.cfg.bsize, 6), name="y")
        self.em = tf.placeholder(tf.float32,
                                 shape=(embedding_matrix.shape[0], embedding_matrix.shape[1]),
                                 name="em")
        self.keep_prob = tf.placeholder(dtype=tf.float32, name="keep_prob")

        self.output = model_baseline(self.em, self.x, self.keep_prob)

        with tf.variable_scope('logits'):
            self.logits = self.output
        with tf.variable_scope('loss'):
            # elementwise Keras-style binary cross-entropy to train on,
            # plus a scalar log loss to report
            self.loss = binary_crossentropy(self.y, self.logits)
            self.cost = tf.losses.log_loss(predictions=self.logits, labels=self.y)

        # streaming ROC-AUC; auc_update_op accumulates statistics batch by batch
        (_, self.auc_update_op) = tf.metrics.auc(predictions=self.logits, labels=self.y,
                                                 curve='ROC')

        # learning rate decays every decay_steps steps, tracked by global_step
        self.global_step = tf.Variable(0, trainable=False)
        self.learning_rate = tf.train.exponential_decay(self.cfg.lr, self.global_step,
                                                        self.cfg.decay_steps, self.cfg.decay,
                                                        staircase=True)

        with tf.variable_scope('optim'):
            self.optimizer = tf.train.RMSPropOptimizer(
                learning_rate=self.learning_rate).minimize(self.loss,
                                                           global_step=self.global_step)
        with tf.variable_scope('saver'):
            self.saver = tf.train.Saver(max_to_keep=self.cfg.max_models_to_keep)
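# A minimal usage sketch (not in the source) of driving the graph built by
# set_graph for one step. `model` (the object holding the tensors above),
# `sess`, and the batch arrays are assumed to exist; 0.7 is an arbitrary
# dropout keep probability.
feed = {model.x: x_batch,             # (bsize, max_seq_len) int32 token ids
        model.y: y_batch,             # (bsize, 6) float32 label matrix
        model.em: embedding_matrix,   # embedding table is fed, not baked into the graph
        model.keep_prob: 0.7}
_, step_cost, running_auc = sess.run(
    [model.optimizer, model.cost, model.auc_update_op], feed_dict=feed)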
X_valid = X[:split_at]
Y_valid = Y[:split_at]
X_train = X[split_at:]
Y_train = Y[split_at:]

graph = tf.Graph()
with graph.as_default():
    with tf.variable_scope('Input'):
        x = tf.placeholder(dtype=tf.int32, shape=(None, maxlen))
        y = tf.placeholder(dtype=tf.float32, shape=(None, 6))
        # The source garbled the line building `logits` into
        # "logits = loss = binary_crossentropy(y, logits)"; feeding the
        # embedding matrix and dropout keep probability and reusing the
        # baseline model, as in set_graph above, is an assumption.
        em = tf.placeholder(dtype=tf.float32,
                            shape=(embedding_matrix.shape[0], embedding_matrix.shape[1]))
        keep_prob = tf.placeholder(dtype=tf.float32)

    logits = model_baseline(em, x, keep_prob)

    loss = binary_crossentropy(y, logits)
    cost = tf.losses.log_loss(labels=y, predictions=logits)
    optimizer = tf.train.RMSPropOptimizer(learning_rate=0.01).minimize(loss)
    (_, auc_update_op) = tf.contrib.metrics.streaming_auc(
        predictions=logits, labels=y, curve='ROC')

    # Both initializers must be built inside this graph, and streaming_auc keeps
    # its accumulators in local variables, so local initialization is needed too.
    init = tf.group(tf.global_variables_initializer(),
                    tf.local_variables_initializer())

train_iters = len(X_train) - 2 * bsize
valid_iters = len(X_valid) - 2 * bsize

with tf.Session(graph=graph) as sess:
    sess.run(init)
    for epoch in range(EPOCHS):
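        # Hypothetical loop body (the source cuts off at the line above): sweep
        # the training set in strides of bsize, then accumulate streaming AUC
        # over the validation split; the keep_prob values are assumed.
        for i in range(0, train_iters, bsize):
            _, batch_cost = sess.run(
                [optimizer, cost],
                feed_dict={x: X_train[i:i + bsize], y: Y_train[i:i + bsize],
                           em: embedding_matrix, keep_prob: 0.7})
        for i in range(0, valid_iters, bsize):
            valid_auc = sess.run(
                auc_update_op,
                feed_dict={x: X_valid[i:i + bsize], y: Y_valid[i:i + bsize],
                           em: embedding_matrix, keep_prob: 1.0})
        # batch_cost reflects only the last training batch of the epoch
        print("epoch %d  train cost %.4f  valid AUC %.4f" % (epoch, batch_cost, valid_auc))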