def model(x, is_training, reuse):
    common_args = common_layer_args(is_training, reuse)
    fc_args = make_args(activation=relu, **common_args)
    logit_args = make_args(activation=None, **common_args)

    # Map token ids to 128-d embeddings over a 10,000-word vocabulary.
    x = embedding(x, 10000, 128, reuse)
    # Single LSTM layer; its output feeds the classification head.
    x = lstm(x, 34, reuse, is_training)
    logits = fc(x, 2, name='logits', **logit_args)
    predictions = softmax(logits, name='predictions', **common_args)
    return end_points(is_training)
def model(x, is_training, reuse, num_classes=2, **kwargs):
    common_args = common_layer_args(is_training, reuse)
    fc_args = make_args(activation=relu, **common_args)
    logit_args = make_args(activation=None, **common_args)

    # Map token ids to 128-d embeddings over a 10,000-word vocabulary.
    x = embedding(x, 10000, 128, reuse)
    # Bidirectional RNN with separate forward and backward LSTM cells of 128 units each.
    x = bidirectional_rnn(x, LSTMCell(128, reuse), LSTMCell(128, reuse), **common_args)
    logits = fc(x, num_classes, name='logits', **logit_args)
    predictions = softmax(logits, name='predictions', **common_args)
    return end_points(is_training)
def model(x, is_training, reuse, num_classes=2, **kwargs):
    common_args = common_layer_args(is_training, reuse)
    fc_args = make_args(activation=relu, **common_args)
    logit_args = make_args(activation=None, **common_args)

    # Map token ids to 128-d embeddings over a 10,000-word vocabulary.
    x = embedding(x, 10000, 128, reuse)
    # Three parallel 1-D convolutions (default filter size, 4, and 5), 128 filters each.
    x1 = conv1d(x, 128, name='conv1_1', **common_args)
    x2 = conv1d(x, 128, filter_size=4, name='conv1_2', **common_args)
    x3 = conv1d(x, 128, filter_size=5, name='conv1_3', **common_args)
    # Concatenate the three feature maps, then summarize the sequence with an LSTM.
    x = merge([x1, x2, x3], 'concat', axis=1)
    x = lstm(x, 384, reuse, is_training)
    x = dropout(x, drop_p=0.3, **common_args)
    logits = fc(x, num_classes, name='logits', **logit_args)
    predictions = softmax(logits, name='predictions', **common_args)
    return end_points(is_training)
def model(x, is_training, reuse):
    common_args = common_layer_args(is_training, reuse)
    fc_args = make_args(activation=relu, **common_args)
    logit_args = make_args(activation=None, **common_args)

    # Map token ids to 128-d embeddings over a 10,000-word vocabulary.
    x = embedding(x, 10000, 128, reuse)
    # Three parallel 1-D convolutions (default filter size, 4, and 5), 128 filters each.
    x1 = conv1d(x, 128, name='conv1_1', **common_args)
    x2 = conv1d(x, 128, filter_size=4, name='conv1_2', **common_args)
    x3 = conv1d(x, 128, filter_size=5, name='conv1_3', **common_args)
    # Concatenate the feature maps, add an extra dimension, and reduce with global max pooling.
    x = merge([x1, x2, x3], 'concat', axis=1)
    x = tf.expand_dims(x, 2)
    x = global_max_pool(x)
    x = dropout(x, drop_p=0.3, **common_args)
    logits = fc(x, 2, name='logits', **logit_args)
    predictions = softmax(logits, name='predictions', **common_args)
    return end_points(is_training)
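
# Minimal usage sketch, not part of the original model definitions. Assumptions: the
# layer helpers used above (common_layer_args, make_args, end_points, embedding, lstm,
# conv1d, fc, softmax, dropout, global_max_pool, ...) come from the tefla library and
# are already imported; the graph is built TF1-style; end_points() returns a dict of
# named tensors that includes the 'predictions' output. The sequence length of 100 is
# a hypothetical choice.
import tensorflow as tf

# Integer token ids, batched, padded/truncated to a fixed length of 100.
inputs = tf.placeholder(tf.int32, shape=[None, 100], name='inputs')

# Build the graph once for training, then rebuild with shared variables for evaluation.
train_end_points = model(inputs, is_training=True, reuse=None)
eval_end_points = model(inputs, is_training=False, reuse=True)

# 'predictions' is the softmax output named in the model definitions above.
predictions = eval_end_points['predictions']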