Example #1
import tensorflow as tf
from tensorflow.contrib.layers import fully_connected

# FLAGS and masked_softmax_cross_entropy are assumed to come from the
# surrounding project (a flags module and a metrics module, respectively).


def construct(M, N):
    # placeholders = {
    #     'embedding': tf.placeholder(tf.float32, shape=(None, FLAGS.dim)),
    #     'train_mask': tf.placeholder(tf.int32, shape=(N,)),
    #     'labels': tf.placeholder(tf.float32, shape=(None, M))
    # }
    placeholders = {
        # 'support': [tf.sparse_placeholder(tf.float32) for _ in range(num_supports)],
        'support_': None,
        'output_dim': M,  # was hard-coded to 39; must match the label width below
    }

    # The graph built below reads placeholders['embedding'] unconditionally,
    # so this function implicitly assumes FLAGS.embed is 0 or 3.
    if FLAGS.embed == 0 or FLAGS.embed == 3:
        placeholders['embedding'] = tf.placeholder(tf.float32,
                                                   shape=(None, 100))  # embedding dim (hard-coded)
        placeholders['train_mask'] = tf.placeholder(tf.int32, shape=(N,))
        placeholders['labels'] = tf.placeholder(tf.float32, shape=(None, M))

    optimizer = tf.train.AdamOptimizer(learning_rate=0.01)
    preds = fully_connected(placeholders['embedding'],
                            M,
                            activation_fn=lambda x: x)  # identity: keep raw logits
    loss = masked_softmax_cross_entropy(preds, placeholders['labels'],
                                        placeholders['train_mask'])
    opt_op = optimizer.minimize(loss)
    return placeholders, opt_op, loss, preds
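All of the examples on this page call a masked_softmax_cross_entropy helper without showing it. A minimal sketch, following the convention of Kipf's GCN reference implementation (gcn/metrics.py): the loss is computed per row, the mask zeroes out non-training rows, and the mask is rescaled so the final mean does not shrink with mask density.

import tensorflow as tf

def masked_softmax_cross_entropy(preds, labels, mask):
    """Softmax cross-entropy on all rows, averaged over the masked ones."""
    loss = tf.nn.softmax_cross_entropy_with_logits(logits=preds, labels=labels)
    mask = tf.cast(mask, dtype=tf.float32)
    mask /= tf.reduce_mean(mask)  # rescale: mean over all rows stays comparable
    loss *= mask                  # zero out rows outside the mask
    return tf.reduce_mean(loss)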
Example #2
    def _loss(self):
        # Weight decay loss (note: only the first layer's variables are regularized)
        for var in self.layers[0].vars.values():
            self.loss += FLAGS.weight_decay * tf.nn.l2_loss(var)

        # Cross entropy error
        self.loss += masked_softmax_cross_entropy(
            self.outputs, self.placeholders['labels'],
            self.placeholders['labels_mask'])
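The weight-decay loop touches only self.layers[0].vars, so only the first layer is regularized. A sketch of how that vars dict typically gets populated, assuming a Layer base class in the style of the GCN reference implementation (the Dense subclass here is illustrative):

import tensorflow as tf

class Layer(object):
    """Base class: each layer registers its trainable variables in self.vars."""
    def __init__(self, name):
        self.name = name
        self.vars = {}

class Dense(Layer):
    def __init__(self, input_dim, output_dim, name='dense'):
        super(Dense, self).__init__(name)
        with tf.variable_scope(self.name + '_vars'):
            # _loss() applies weight decay to exactly this dict
            self.vars['weights'] = tf.get_variable(
                'weights', shape=(input_dim, output_dim),
                initializer=tf.glorot_uniform_initializer())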
Example #3
    def set_loss(self):
        """Loss function."""
        with tf.name_scope("loss"):
            # Weight decay loss
            self.loss = 0
            for var in self.vars:
                self.loss += self.params.weight_decay * tf.nn.l2_loss(var)
            # Cross entropy error
            self.loss += metrics.masked_softmax_cross_entropy(
                self.outputs, self.labels, self.labels_mask)
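Every variant above takes a mask selecting which rows enter the loss. A minimal sketch of how such a mask is commonly built from a list of training indices (the helper name sample_mask follows the GCN utilities; the call site is illustrative):

import numpy as np

def sample_mask(idx, n):
    """Boolean mask of length n, True exactly at the positions in idx."""
    mask = np.zeros(n, dtype=np.bool_)
    mask[idx] = True
    return mask

# e.g. the first 140 of 1000 nodes form the training set
train_mask = sample_mask(np.arange(140), n=1000)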
Example #4
    def _loss(self):
        # Weight decay loss
        for var in self.layers[0].vars.values():
            self.loss += FLAGS.weight_decay * tf.nn.l2_loss(var)

        # Cross entropy error
        # Cross entropy error for categorical targets
        if self.categorical:
            self.loss += metrics.masked_softmax_cross_entropy(
                self.outputs, self.placeholders['labels'],
                self.placeholders['labels_mask'])
        # L2 (regression) loss otherwise: sum of per-row Euclidean norms
        else:
            diff = self.labels - self.outputs
            self.loss += tf.reduce_sum(tf.sqrt(tf.reduce_sum(diff * diff, axis=1)))
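The regression branch spells out the sum of per-row Euclidean norms by hand. An equivalent, more readable formulation (a sketch using TF 1.x's tf.norm; the gradient at a zero-norm row is the same caveat as with the manual sqrt):

import tensorflow as tf

labels = tf.placeholder(tf.float32, shape=(None, 4))
outputs = tf.placeholder(tf.float32, shape=(None, 4))

diff = labels - outputs
# Same value as tf.reduce_sum(tf.sqrt(tf.reduce_sum(diff * diff, axis=1)))
loss = tf.reduce_sum(tf.norm(diff, axis=1))  # per-row Euclidean norm, summed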
Example #5
    def _loss(self):
        """Construct the loss function."""
        # Weight decay loss
        if FLAGS.weight_decay > 0.0:
            for var in self.layers[0].vars.values():
                self.loss += FLAGS.weight_decay * tf.nn.l2_loss(var)

        # Cross entropy error: sigmoid for multilabel targets (independent
        # labels per node), softmax for mutually exclusive classes
        if self.multilabel:
            self.loss += metrics.masked_sigmoid_cross_entropy(
                self.outputs, self.placeholders['labels'],
                self.placeholders['labels_mask'])
        else:
            self.loss += metrics.masked_softmax_cross_entropy(
                self.outputs, self.placeholders['labels'],
                self.placeholders['labels_mask'])
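masked_sigmoid_cross_entropy is the multilabel counterpart of the softmax helper sketched under Example #1. A minimal sketch under the same masking convention (the per-row mean over label columns is an assumption; check the project's metrics module for the exact reduction):

import tensorflow as tf

def masked_sigmoid_cross_entropy(preds, labels, mask):
    """Independent per-label sigmoid cross-entropy, averaged over masked rows."""
    loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=preds, labels=labels)
    loss = tf.reduce_mean(loss, axis=1)  # mean over the label columns
    mask = tf.cast(mask, dtype=tf.float32)
    mask /= tf.reduce_mean(mask)
    loss *= mask
    return tf.reduce_mean(loss)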