def cnn_att_adv(is_training):
    """CNN encoder + selective attention, trained with adversarial perturbation.

    Training builds the network twice under the same variable scope: a first
    (clean) pass produces a loss, that loss drives an adversarial perturbation
    of the word embeddings, and a second pass (reuse=True, shared weights) on
    the perturbed embeddings supplies the loss actually optimized.

    Args:
        is_training: if True, build the training graph and run training;
            otherwise build the inference graph and run the test loop.
    """
    if is_training:
        with tf.variable_scope('cnn_att_adv', reuse=False):
            framework = Framework(is_training=True)
            word_embedding = framework.embedding.word_embedding()
            pos_embedding = framework.embedding.pos_embedding()
            embedding = framework.embedding.concat_embedding(
                word_embedding, pos_embedding)
            x = framework.encoder.cnn(embedding, FLAGS.hidden_size,
                                      framework.mask, activation=tf.nn.relu)
            logit, repre = framework.selector.attention(
                x, framework.scope, framework.label_for_select)
            # Add perturbation: clean-pass loss drives a gradient-based
            # adversarial perturbation of the word embeddings.
            loss = framework.classifier.softmax_cross_entropy(logit)
            new_word_embedding = framework.adversarial(loss, word_embedding)
            new_embedding = framework.embedding.concat_embedding(
                new_word_embedding, pos_embedding)
        # Train: sibling scope with reuse=True so the second pass shares the
        # variables created above (nesting would create a new scope name).
        with tf.variable_scope('cnn_att_adv', reuse=True):
            x = framework.encoder.cnn(new_embedding, FLAGS.hidden_size,
                                      framework.mask, activation=tf.nn.relu)
            logit, repre = framework.selector.attention(
                x, framework.scope, framework.label_for_select)
            loss = framework.classifier.softmax_cross_entropy(logit)
            output = framework.classifier.output(logit)
            framework.init_train_model(
                loss, output, optimizer=tf.train.GradientDescentOptimizer)
            framework.load_train_data()
            framework.train()
    else:
        with tf.variable_scope('cnn_att_adv', reuse=False):
            framework = Framework(is_training=False)
            word_embedding = framework.embedding.word_embedding()
            pos_embedding = framework.embedding.pos_embedding()
            embedding = framework.embedding.concat_embedding(
                word_embedding, pos_embedding)
            x = framework.encoder.cnn(embedding, FLAGS.hidden_size,
                                      framework.mask, activation=tf.nn.relu)
            logit, repre = framework.selector.attention(
                x, framework.scope, framework.label_for_select)
            # NOTE(review): sibling builders pass tf.nn.softmax(...) to
            # init_test_model; this one passes raw logits — confirm intended.
            framework.init_test_model(logit)
            framework.load_test_data()
            framework.test()
def birnn_ave_adv(is_training):
    """Bidirectional RNN encoder + average selector with adversarial training.

    Training builds the network twice under the same variable scope: a first
    (clean) pass produces a loss, that loss drives an adversarial perturbation
    of the word embeddings, and a second pass (reuse=True, shared weights) on
    the perturbed embeddings supplies the loss actually optimized.

    Args:
        is_training: if True, build the training graph and run training;
            otherwise build the inference graph and run the test loop.
    """
    if is_training:
        with tf.variable_scope('birnn_ave_adv', reuse=False):
            framework = Framework(is_training=True)
            word_embedding = framework.embedding.word_embedding()
            pos_embedding = framework.embedding.pos_embedding()
            embedding = framework.embedding.concat_embedding(
                word_embedding, pos_embedding)
            x = framework.encoder.birnn(embedding, FLAGS.hidden_size,
                                        framework.length)
            logit, repre = framework.selector.average(x, framework.scope)
            # Add perturbation: clean-pass loss drives a gradient-based
            # adversarial perturbation of the word embeddings.
            loss = framework.classifier.softmax_cross_entropy(logit)
            new_word_embedding = framework.adversarial(loss, word_embedding)
            new_embedding = framework.embedding.concat_embedding(
                new_word_embedding, pos_embedding)
        # Train: sibling scope with reuse=True so the second pass shares the
        # variables created above.
        with tf.variable_scope('birnn_ave_adv', reuse=True):
            x = framework.encoder.birnn(new_embedding, FLAGS.hidden_size,
                                        framework.length)
            logit, repre = framework.selector.average(x, framework.scope)
            loss = framework.classifier.softmax_cross_entropy(logit)
            # BUG FIX: original read `output = output(logit)`, calling the
            # yet-unbound local `output` (UnboundLocalError at build time).
            # Use the classifier's output op, as the sibling builders do.
            output = framework.classifier.output(logit)
            framework.init_train_model(
                loss, output, optimizer=tf.train.GradientDescentOptimizer)
            framework.load_train_data()
            framework.train()
    else:
        with tf.variable_scope('birnn_ave_adv', reuse=False):
            framework = Framework(is_training=False)
            word_embedding = framework.embedding.word_embedding()
            pos_embedding = framework.embedding.pos_embedding()
            embedding = framework.embedding.concat_embedding(
                word_embedding, pos_embedding)
            x = framework.encoder.birnn(embedding, FLAGS.hidden_size,
                                        framework.length)
            logit, repre = framework.selector.average(x, framework.scope)
            framework.init_test_model(tf.nn.softmax(logit))
            framework.load_test_data()
            framework.test()
def rnn_att_adv(is_training):
    """RNN encoder + attention selector with adversarial training.

    Training builds the network twice under the same variable scope: a first
    (clean) pass produces a loss, that loss drives an adversarial perturbation
    of the word embeddings, and a second pass (reuse=True, shared weights) on
    the perturbed embeddings supplies the loss actually optimized.

    Args:
        is_training: if True, build the training graph and run training;
            otherwise build the inference graph and run the test loop.
    """
    if is_training:
        with tf.variable_scope('rnn_att_adv', reuse=False):
            framework = Framework(is_training=True)
            word_embedding = framework.embedding.word_embedding()
            pos_embedding = framework.embedding.pos_embedding()
            embedding = framework.embedding.concat_embedding(
                word_embedding, pos_embedding)
            x = framework.encoder.rnn(embedding)
            # NOTE(review): attention() is called with a single argument and a
            # single return here, unlike cnn_att_adv's three-argument,
            # (logit, repre) form — verify the selector API this expects.
            x = framework.selector.attention(x)
            # Add perturbation: clean-pass loss drives a gradient-based
            # adversarial perturbation of the word embeddings.
            loss = framework.classifier.softmax_cross_entropy(x)
            new_word_embedding = framework.adversarial(loss, word_embedding)
            new_embedding = framework.embedding.concat_embedding(
                new_word_embedding, pos_embedding)
        # Train: sibling scope with reuse=True so the second pass shares the
        # variables created above.
        with tf.variable_scope('rnn_att_adv', reuse=True):
            x = framework.encoder.rnn(new_embedding)
            x = framework.selector.attention(x)
            loss = framework.classifier.softmax_cross_entropy(x)
            output = framework.classifier.output(x)
            framework.init_train_model(
                loss, output, optimizer=tf.train.GradientDescentOptimizer)
            framework.load_train_data()
            framework.train()
    else:
        with tf.variable_scope('rnn_att_adv', reuse=False):
            framework = Framework(is_training=False)
            word_embedding = framework.embedding.word_embedding()
            pos_embedding = framework.embedding.pos_embedding()
            embedding = framework.embedding.concat_embedding(
                word_embedding, pos_embedding)
            x = framework.encoder.rnn(embedding)
            x = framework.selector.attention(x)
            framework.init_test_model(tf.nn.softmax(x))
            framework.load_test_data()
            framework.test()