예제 #1
0
            self.feed_vars = [input, index]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [input])


class TFGather(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the gather benchmark.

        Args:
            backward: if True, also append gradients of the result.
            dtype: unused; kept for signature consistency with siblings.
        """
        import tensorflow as tf

        self.name = "gather"
        self.allow_growth = True

        # Feed names ('input', 'index') are unchanged; locals renamed so the
        # builtin `input` is not shadowed.
        params = tf.placeholder(name='input',
                                shape=config.input_shape,
                                dtype=tf.float32)
        indices = tf.placeholder(name='index',
                                 shape=config.index_shape,
                                 dtype=tf.int32)
        result = tf.gather(params=params, indices=indices)

        self.feed_list = [params, indices]
        self.fetch_list = [result]
        if backward:
            # FIX: gradients were previously requested w.r.t. the int32 index
            # tensor as well; tf.gather defines no gradient for its indices,
            # and the Paddle counterpart differentiates only w.r.t. params.
            self.append_gradients(result, [params])


if __name__ == '__main__':
    # Run the gather benchmark for both frameworks under the shared feed spec.
    test_main(PDGather(), TFGather(), feed_spec=config.feed_spec)
예제 #2
0
                              dtype='float32',
                              lod_level=0)
            data.stop_gradient = False

            value, indices = fluid.layers.topk(input=data, k=5)

            self.feed_vars = [data]
            self.fetch_vars = [value, indices]
            if backward:
                self.append_gradients([value, indices], [data])


class TFTopK(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the top-k benchmark."""
        import tensorflow as tf

        self.name = "topk"
        self.allow_growth = True

        x = tf.placeholder(name='data', shape=[16, 1000], dtype=tf.float32)
        top_values, top_indices = tf.math.top_k(input=x, k=5)

        self.feed_list = [x]
        self.fetch_list = [top_values, top_indices]
        if backward:
            self.append_gradients([top_values, top_indices], [x])


if __name__ == '__main__':
    # Run the top-k benchmark for both frameworks with default random feeds.
    test_main(PDTopK(), TFTopK(), feed_spec=None)
예제 #3
0
                seed=123,
                dropout_implementation="upscale_in_train")

            self.feed_vars = [data]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data])


class TFDropout(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the dropout benchmark."""
        import tensorflow as tf

        self.name = "dropout"
        self.allow_growth = True

        x = tf.placeholder(name='data',
                           shape=[10, 10, 100, 100],
                           dtype=tf.float32)
        out = tf.nn.dropout(x=x, rate=0.2, noise_shape=None, seed=123)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # NOTE: results are not consistent between the two frameworks --
    # presumably because dropout masks are framework/RNG specific even with
    # a fixed seed; TODO confirm.
    test_main(PDDropout(), TFDropout(), feed_spec=None)
예제 #4
0
            self.feed_vars = [data1, data2]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data1, data2])


class TFConcat(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the concat benchmark.

        Args:
            backward: if True, also append gradients of the result.
            dtype: unused; added (with a default) so the signature matches
                every other build_graph in this suite -- callers passing only
                `backward` are unaffected.
        """
        import tensorflow as tf

        self.name = "concat"
        self.allow_growth = True

        data1 = tf.placeholder(name='data1',
                               shape=[100, 200],
                               dtype=tf.float32)
        data2 = tf.placeholder(name='data2',
                               shape=[100, 200],
                               dtype=tf.float32)
        # Concatenate along axis 0 -> [200, 200].
        result = tf.concat([data1, data2], 0)

        self.feed_list = [data1, data2]
        self.fetch_list = [result]
        if backward:
            self.append_gradients(result, [data1, data2])


if __name__ == '__main__':
    # Run the concat benchmark for both frameworks with default random feeds.
    test_main(PDConcat(), TFConcat(), feed_spec=None)

class TFSoftmaxWithCrossEntropy(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the softmax_with_cross_entropy benchmark."""
        import tensorflow as tf

        self.name = "softmax_with_cross_entropy"
        self.allow_growth = True

        logits = tf.placeholder(name='input',
                                shape=config.input_shape,
                                dtype=tf.float32)
        labels = tf.placeholder(name='label',
                                shape=config.label_shape(for_tensorflow=True),
                                dtype=tf.int32)
        # tf.losses.softmax_cross_entropy expects one-hot labels.
        onehot = tf.one_hot(indices=labels, depth=config.num_classes)
        loss = tf.losses.softmax_cross_entropy(logits=logits,
                                               onehot_labels=onehot)

        self.feed_list = [logits, labels]
        self.fetch_list = [loss]
        if backward:
            self.append_gradients(loss, [logits])


if __name__ == '__main__':
    # NOTE: results are reported as not consistent between frameworks --
    # presumably a label-handling/reduction difference; TODO confirm.
    test_main(PDSoftmaxWithCrossEntropy(),
              TFSoftmaxWithCrossEntropy(),
              feed_spec=config.feed_spec)
예제 #6
0
                               lod_level=0)
            input.stop_gradient = False
            result = fluid.layers.cast(input, dtype=config.dtype)

            self.feed_vars = [input]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [input])


class TFCast(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the cast benchmark.

        Args:
            backward: if True, also append gradients of the result.
            dtype: unused; kept for signature consistency with siblings.
        """
        import tensorflow as tf

        self.name = "cast"
        self.allow_growth = True

        # Renamed local so the builtin `input` is not shadowed; the feed name
        # string 'input' is unchanged.
        x = tf.placeholder(name='input',
                           shape=config.input_shape,
                           dtype=tf.float32)
        result = tf.cast(x, dtype=config.dtype)

        self.feed_list = [x]
        self.fetch_list = [result]
        if backward:
            self.append_gradients(result, [x])


if __name__ == '__main__':
    # Run the cast benchmark for both frameworks under the shared feed spec.
    test_main(PDCast(), TFCast(), feed_spec=config.feed_spec)
예제 #7
0
                              lod_level=0)
            data.stop_gradient = False
            result = fluid.layers.squeeze(data, axes=[])

            self.feed_vars = [data]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data])


class TFSqueeze(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the squeeze benchmark."""
        import tensorflow as tf

        self.name = "squeeze"
        self.allow_growth = True

        x = tf.placeholder(name='data',
                           shape=[1, 10, 100, 1, 100],
                           dtype=tf.float32)
        # No axis list given: all size-1 dimensions are removed.
        out = tf.squeeze(x)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # Run the squeeze benchmark for both frameworks with default random feeds.
    test_main(PDSqueeze(), TFSqueeze(), feed_spec=None)
예제 #8
0
    model = enc_dec_model(args)
    test_model = enc_dec_model(args)
elif args.model_name == 'DNN_naive':
    model = DNN_naive(args, args.dnn_hidden)
    test_model = DNN_naive(args, args.dnn_hidden)
# Move the freshly built model to the configured device (CPU or CUDA).
model.to(args.device)

print("-----Setting the Optimizer-----")
# Pick the optimizer by name. NOTE(review): if args.opt matches none of these
# branches, `optimizer` is never bound and train_main below raises NameError
# -- consider validating args.opt upstream.
if args.opt == 'Adam':
    optimizer = optim.Adam(model.parameters(), lr=args.learning_rate)
elif args.opt == 'RMSprop':
    optimizer = optim.RMSprop(model.parameters(), lr=args.learning_rate)
elif args.opt == 'Adadelta':
    optimizer = optim.Adadelta(model.parameters(), lr=args.learning_rate)
elif args.opt == 'SGD':
    # Plain SGD with fixed 0.9 momentum.
    optimizer = optim.SGD(model.parameters(),
                          lr=args.learning_rate,
                          momentum=0.9)

# Train, then reload the best checkpoint for final evaluation.
train_log = train_main(args, train_loader, valid_loader, model, optimizer)

print("Loading Best model for testing")
best_model_path = args.save_model_path + '.best.pth'
# torch.load deserializes the whole pickled model object; its weights are
# then copied into test_model via a state_dict round-trip.
tmp_model = torch.load(best_model_path)
state_dict = tmp_model.state_dict()
test_model.load_state_dict(state_dict)
test_model.to(args.device)

use = 'test'
test_main(args, test_model, test_loader, use, train_log)
예제 #9
0
class TFConv2d(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the conv2d benchmark.

        Args:
            backward: if True, also append gradients of the result.
            dtype: unused; kept for signature consistency with siblings.
        """
        import tensorflow as tf

        self.name = "conv2d"
        self.allow_growth = True

        # Locals renamed so the builtins `input` and `filter` are not
        # shadowed; the feed name strings are unchanged.
        x = tf.placeholder(name='input',
                           shape=config.input_shape,
                           dtype=tf.float32)
        weights = tf.placeholder(name='filter',
                                 shape=config.filter_shape(for_tensorflow=True),
                                 dtype=tf.float32)
        result = tf.nn.conv2d(input=x,
                              filter=weights,
                              strides=config.stride,
                              padding=config.padding(for_tensorflow=True),
                              data_format=config.data_format,
                              dilations=config.dilation,
                              use_cudnn_on_gpu=config.use_cudnn)

        self.feed_list = [x, weights]
        self.fetch_list = [result]
        if backward:
            # Gradient w.r.t. the input only, matching the original behavior.
            self.append_gradients(result, [x])


if __name__ == '__main__':
    # Run the conv2d benchmark for both frameworks under the shared feed spec.
    test_main(PDConv2d(), TFConv2d(), feed_spec=config.feed_spec)
예제 #10
0
            if backward:
                self.append_gradients(result, [x, y])


class TFMatmul(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the matmul benchmark.

        Args:
            backward: if True, also append gradients of the result.
            dtype: unused; added (with a default) so the signature matches
                every other build_graph in this suite -- existing callers
                are unaffected.
        """
        import tensorflow as tf

        self.name = "matmul"
        self.allow_growth = True

        x = tf.placeholder(name='x', shape=config.x_shape, dtype=tf.float32)
        y = tf.placeholder(name='y', shape=config.y_shape, dtype=tf.float32)
        result = tf.matmul(a=x,
                           b=y,
                           transpose_a=config.transpose_x,
                           transpose_b=config.transpose_y,
                           adjoint_a=False,
                           adjoint_b=False,
                           a_is_sparse=False,
                           b_is_sparse=False)

        self.feed_list = [x, y]
        self.fetch_list = [result]
        if backward:
            self.append_gradients(result, [x, y])


if __name__ == '__main__':
    # Run the matmul benchmark for both frameworks with default random feeds.
    test_main(PDMatmul(), TFMatmul(), feed_spec=None)
예제 #11
0
            starts = [0, 1, 1, 0]
            ends = [3, 8, 50, 30]
            result = fluid.layers.slice(data, axes=axes, starts=starts, ends=ends)

            self.feed_vars = [data]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data])


class TFSlice(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the slice benchmark."""
        import tensorflow as tf

        self.name = "slice"
        self.allow_growth = True

        x = tf.placeholder(name='data',
                           shape=[10, 10, 100, 100],
                           dtype=tf.float32)
        offsets = [0, 1, 1, 0]
        extents = [3, 7, 49, 30]
        out = tf.slice(x, offsets, extents)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # Run the slice benchmark for both frameworks with default random feeds.
    test_main(PDSlice(), TFSlice(), feed_spec=None)
예제 #12
0
        with fluid.program_guard(self.main_program, self.startup_program):
            data = fluid.data(
                name='data', shape=[10, 10, 100, 100], dtype='float32', lod_level=0)
            data.stop_gradient = False
            result = fluid.layers.sigmoid(data)

            self.feed_vars = [data]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data])


class TFSigmoid(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the sigmoid benchmark.

        Args:
            backward: if True, also append gradients of the result.
            dtype: unused; added (with a default) so the signature matches
                every other build_graph in this suite -- existing callers
                are unaffected.
        """
        import tensorflow as tf

        self.name = "sigmoid"
        self.allow_growth = True

        data = tf.placeholder(name='data',
                              shape=[10, 10, 100, 100],
                              dtype=tf.float32)
        result = tf.math.sigmoid(data)

        self.feed_list = [data]
        self.fetch_list = [result]
        if backward:
            self.append_gradients(result, [data])


if __name__ == '__main__':
    # Run the sigmoid benchmark for both frameworks with default random feeds.
    test_main(PDSigmoid(), TFSigmoid(), feed_spec=None)
예제 #13
0
                              lod_level=0)
            data.stop_gradient = False
            result = fluid.layers.abs(x=data)

            self.feed_vars = [data]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data])


class TFAbs(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the abs benchmark."""
        import tensorflow as tf

        self.name = "abs"
        self.allow_growth = True

        x = tf.placeholder(name='data',
                           shape=config.input_shape,
                           dtype=tf.float32)
        out = tf.abs(x=x)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # Run the abs benchmark for both frameworks under the shared feed spec.
    test_main(PDAbs(), TFAbs(), feed_spec=config.feed_spec)
예제 #14
0
                           lod_level=0)
            x.stop_gradient = False
            result = fluid.layers.expand(x=x, expand_times=config.expand_times)

            self.feed_vars = [x]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [x])


class TFExpand(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the expand benchmark (tf.tile)."""
        import tensorflow as tf

        self.name = "expand"
        self.allow_growth = True

        x = tf.placeholder(name='data',
                           shape=config.x_shape,
                           dtype=tf.float32)
        # Paddle's expand corresponds to TF's tile.
        out = tf.tile(input=x, multiples=config.expand_times)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # Run the expand benchmark for both frameworks with default random feeds.
    test_main(PDExpand(), TFExpand(), feed_spec=None)
예제 #15
0
                              lod_level=0)
            data.stop_gradient = False
            result = fluid.layers.transpose(x=data, perm=[2, 3, 0, 1])

            self.feed_vars = [data]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data])


class TFTranspose(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the transpose benchmark."""
        import tensorflow as tf

        self.name = "transpose"
        self.allow_growth = True

        x = tf.placeholder(name='data',
                           shape=[10, 10, 100, 100],
                           dtype=tf.float32)
        out = tf.transpose(a=x, perm=[2, 3, 0, 1], conjugate=False)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # Run the transpose benchmark for both frameworks with default random feeds.
    test_main(PDTranspose(), TFTranspose(), feed_spec=None)
예제 #16
0
from common import paddle_api_benchmark as paddle_api


class PDConv2dTranspose(paddle_api.PaddleAPIBenchmarkBase):
    def build_program(self, backward=False, dtype=None):
        """Build the Paddle program for the conv2d_transpose benchmark.

        Args:
            backward: if True, also append gradients of the result.
            dtype: element type of the input tensor.
        """
        import paddle.fluid as fluid

        self.name = "conv2d_transpose"
        with fluid.program_guard(self.main_program, self.startup_program):
            # Renamed local so the builtin `input` is not shadowed; the feed
            # name string 'input' is unchanged.
            x = fluid.data(name='input',
                           shape=[1, 1, 80, 63],
                           dtype=dtype,
                           lod_level=0)
            x.stop_gradient = False
            result = fluid.layers.conv2d_transpose(input=x,
                                                   num_filters=1,
                                                   filter_size=(3, 32),
                                                   padding=(1, 8),
                                                   stride=(1, 16),
                                                   bias_attr=False,
                                                   use_cudnn=True)

            self.feed_vars = [x]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [x])


if __name__ == '__main__':
    # Paddle-only run; no TF counterpart is registered for this op here.
    test_main(PDConv2dTranspose(), feed_spec=None)
예제 #17
0
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [input])


class TFFC(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the fully-connected (fc) benchmark."""
        import tensorflow as tf

        self.name = "fc"
        self.allow_growth = True

        x = tf.placeholder(name='input',
                           shape=config.input_shape,
                           dtype=tf.float32)
        # Constant initializers keep the comparison deterministic.
        out = tf.contrib.layers.fully_connected(
            inputs=x,
            num_outputs=config.size,
            weights_initializer=tf.constant_initializer(0.5),
            biases_initializer=tf.constant_initializer(0.1),
            activation_fn=None)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # Run the fc benchmark for both frameworks with default random feeds.
    test_main(PDFC(), TFFC(), feed_spec=None)
예제 #18
0
                name='y', shape=[1, 128, 1000], dtype='float32', lod_level=0)
            x.stop_gradient = False
            y.stop_gradient = False
            result = fluid.layers.elementwise_mul(x=x, y=y, act=None)

            self.feed_vars = [x, y]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [x, y])


class TFElementwiseMul(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the elementwise_mul benchmark."""
        import tensorflow as tf

        self.name = "elementwise_mul"
        self.allow_growth = True

        lhs = tf.placeholder(name='x', shape=[50, 128, 1000], dtype=tf.float32)
        rhs = tf.placeholder(name='y', shape=[1, 128, 1000], dtype=tf.float32)
        # rhs broadcasts across the leading dimension of lhs.
        out = tf.multiply(x=lhs, y=rhs)

        self.feed_list = [lhs, rhs]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [lhs, rhs])


if __name__ == '__main__':
    # Run the elementwise_mul benchmark for both frameworks.
    test_main(PDElementwiseMul(), TFElementwiseMul(), feed_spec=None)
예제 #19
0
            input.stop_gradient = False
            result = fluid.layers.assign(input)

            self.feed_vars = [input]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [input])


class TFAssign(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the assign benchmark."""
        import tensorflow as tf

        self.name = "assign"
        self.allow_growth = True

        src = tf.placeholder(
            name='input', shape=config.input_shape, dtype=tf.float32)
        # A zero-initialized variable that receives the placeholder's value.
        target_var = tf.Variable(
            tf.zeros(config.input_shape), name='target', dtype=tf.float32)
        assign_op = tf.assign(ref=target_var, value=src)

        self.feed_list = [src]
        self.fetch_list = [assign_op]
        if backward:
            self.append_gradients(assign_op, [src])


if __name__ == '__main__':
    # Run the assign benchmark for both frameworks with default random feeds.
    test_main(PDAssign(), TFAssign(), feed_spec=None)
예제 #20
0
            result = fluid.embedding(
                input=input, size=config.table_shape, param_attr='table')

            self.feed_vars = [input, table]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [table])


class TFEmbedding(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the embedding benchmark.

        Args:
            backward: if True, also append gradients of the result w.r.t.
                the embedding table (the int64 ids have no gradient).
            dtype: unused; added (with a default) so the signature matches
                every other build_graph in this suite -- existing callers
                are unaffected.
        """
        import tensorflow as tf

        self.name = "embedding"
        self.allow_growth = True

        input = tf.placeholder(
            name='input', shape=config.input_shape, dtype=tf.int64)
        table = tf.placeholder(
            name='table', shape=config.table_shape, dtype=tf.float32)
        result = tf.nn.embedding_lookup(ids=input, params=table, max_norm=None)

        self.feed_list = [input, table]
        self.fetch_list = [result]
        if backward:
            self.append_gradients(result, [table])


if __name__ == '__main__':
    # Run the embedding benchmark under the shared feed spec.
    test_main(PDEmbedding(), TFEmbedding(), feed_spec=config.feed_spec)
예제 #21
0
                              lod_level=0)
            data.stop_gradient = False
            result = fluid.layers.softmax(input=data, use_cudnn=False, axis=-1)

            self.feed_vars = [data]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data])


class TFSoftmax(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the softmax benchmark."""
        import tensorflow as tf

        self.name = "softmax"
        self.allow_growth = True

        logits = tf.placeholder(name='data',
                                shape=[16, 10, 100],
                                dtype=tf.float32)
        out = tf.nn.softmax(logits=logits, axis=-1)

        self.feed_list = [logits]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [logits])


if __name__ == '__main__':
    # Run the softmax benchmark for both frameworks with default random feeds.
    test_main(PDSoftmax(), TFSoftmax(), feed_spec=None)
예제 #22
0
        self.allow_growth = True

        input = tf.placeholder(name='input',
                               shape=config.input_shape,
                               dtype=tf.float32)
        scale = tf.placeholder(name='scale',
                               shape=[config.num_channels],
                               dtype=tf.float32)
        bias = tf.placeholder(name='bias',
                              shape=[config.num_channels],
                              dtype=tf.float32)
        mean, var = tf.nn.moments(x=input,
                                  axes=config.axes,
                                  shift=None,
                                  keepdims=False)
        result = tf.nn.batch_normalization(x=input,
                                           mean=mean,
                                           variance=var,
                                           offset=bias,
                                           scale=scale,
                                           variance_epsilon=config.epsilon)

        self.feed_list = [input, scale, bias]
        self.fetch_list = [result]
        if backward:
            self.append_gradients(result, [input, scale, bias])


if __name__ == '__main__':
    # Run the batch_norm benchmark for both frameworks.
    test_main(PDBatchNorm(), TFBatchNorm(), feed_spec=None)
예제 #23
0
            self.feed_vars = [data1, data2]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data1, data2])


class TFStack(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the stack benchmark."""
        import tensorflow as tf

        self.name = "stack"
        self.allow_growth = True

        lhs = tf.placeholder(name='data1',
                             shape=[10, 10, 100, 100],
                             dtype=tf.float32)
        rhs = tf.placeholder(name='data2',
                             shape=[10, 10, 100, 100],
                             dtype=tf.float32)
        # Stack along a new axis 1 -> [10, 2, 10, 100, 100].
        out = tf.stack([lhs, rhs], axis=1)

        self.feed_list = [lhs, rhs]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [lhs, rhs])


if __name__ == '__main__':
    # Run the stack benchmark for both frameworks with default random feeds.
    test_main(PDStack(), TFStack(), feed_spec=None)
예제 #24
0
                              lod_level=0)
            data.stop_gradient = False
            result = fluid.layers.reduce_sum(input=data,
                                             dim=-1,
                                             keep_dim=False)

            self.feed_vars = [data]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data])


class TFReduceSum(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the reduce_sum benchmark."""
        import tensorflow as tf

        self.name = "reduce_sum"
        self.allow_growth = True

        x = tf.placeholder(name='data', shape=[32, 768], dtype=tf.float32)
        # Sum over the last axis without keeping the reduced dimension.
        out = tf.reduce_sum(input_tensor=x, axis=-1, keepdims=False)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # Run the reduce_sum benchmark for both frameworks.
    test_main(PDReduceSum(), TFReduceSum(), feed_spec=None)
예제 #25
0
                data, num_or_sections=[1, 2, 7], dim=1)

            self.feed_vars = [data]
            self.fetch_vars = [result1, result2, result3]
            if backward:
                self.append_gradients([result1, result2, result3], [data])


class TFSplit(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the split benchmark."""
        import tensorflow as tf

        self.name = "split"
        self.allow_growth = True

        x = tf.placeholder(name='data',
                           shape=[10, 10, 100, 100],
                           dtype=tf.float32)
        # Split axis 1 (size 10) into sub-tensors of sizes 1, 2 and 7.
        part1, part2, part3 = tf.split(value=x,
                                       num_or_size_splits=[1, 2, 7],
                                       axis=1)

        self.feed_list = [x]
        self.fetch_list = [part1, part2, part3]
        if backward:
            self.append_gradients([part1, part2, part3], [x])


if __name__ == '__main__':
    # Run the split benchmark for both frameworks with default random feeds.
    test_main(PDSplit(), TFSplit(), feed_spec=None)
예제 #26
0
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [input])


class TFPool2d(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the pool2d benchmark."""
        import tensorflow as tf

        self.name = "pool2d"
        self.allow_growth = True

        x = tf.placeholder(name='input',
                           shape=config.input_shape,
                           dtype=tf.float32)
        # All pooling hyper-parameters come from the shared config module.
        out = tf.nn.pool(x,
                         window_shape=config.pool_size,
                         pooling_type=config.pool_type(),
                         strides=config.pool_stride,
                         padding=config.pool_padding(for_tensorflow=True),
                         data_format=config.data_format)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # Run the pool2d benchmark for both frameworks with default random feeds.
    test_main(PDPool2d(), TFPool2d(), feed_spec=None)
예제 #27
0
파일: test.py 프로젝트: Nunpuking/NI_SFC
# Paths and output locations for the evaluation run.
parser.add_argument("--model_path", type=str, default='')
parser.add_argument("--save_dir", type=str, default='./result/')
parser.add_argument("--dataset_path", type=str, default='')
parser.add_argument("--save_log_path", type=str, default='')

parser.add_argument("--n_valid_print", type=int, default=3)
# NOTE(review): type=str only applies to command-line-provided values;
# the default stays a Python list, so args.rank_method is a list unless
# the flag is passed -- verify downstream code handles both.
parser.add_argument("--rank_method",
                    type=str,
                    default=['sfcid', 'src_max', 'dst_max'])
parser.add_argument("--use", type=str, default='test')

args = parser.parse_args()
# Resolve the compute device once and stash it on args.
args.cuda = torch.cuda.is_available()
args.device = torch.device("cuda" if args.cuda else "cpu")

print("-----Loading Training Dataset-----")
placement = load_placement(args.raw_placement_dataset_path)

# The CSV's first column is an index, not data.
dataset_path = args.dataset_path
dataset = pd.read_csv(dataset_path, index_col=0)
dataset = np.array(dataset)

data_loader = sfc_dataset(args, dataset, placement)
print("# of dataset  : {}".format(data_loader.len()))

print("-----Building the Model-----")
# torch.load deserializes the whole pickled model object saved earlier.
test_model = torch.load(args.model_path)
test_model.to(args.device)

test_main(args, test_model, data_loader, use=args.use)
예제 #28
0
            input.stop_gradient = False
            result = fluid.one_hot(input=input, depth=config.depth)

            self.feed_vars = [input]
            self.fetch_vars = [result]


class TFOneHot(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the one_hot benchmark.

        Note: `backward` is accepted for interface parity but unused here --
        the original registers no gradients for this op.
        """
        import tensorflow as tf

        self.name = "one_hot"
        self.allow_growth = True

        ids = tf.placeholder(
            name='input', shape=config.input_shape, dtype=tf.int32)
        encoded = tf.one_hot(
            indices=ids,
            depth=config.depth,
            on_value=None,
            off_value=None,
            axis=None,
            dtype=None)

        self.feed_list = [ids]
        self.fetch_list = [encoded]


if __name__ == '__main__':
    # Run the one_hot benchmark under the shared feed spec.
    test_main(PDOneHot(), TFOneHot(), feed_spec=config.feed_spec)
예제 #29
0
                              lod_level=0)
            data.stop_gradient = False
            result = fluid.layers.reshape(x=data, shape=config.shape)

            self.feed_vars = [data]
            self.fetch_vars = [result]
            if backward:
                self.append_gradients(result, [data])


class TFReshape(tensorflow_api.TensorflowAPIBenchmarkBase):
    def build_graph(self, backward=False, dtype=None):
        """Build the TF graph for the reshape benchmark."""
        import tensorflow as tf

        self.name = "reshape"
        self.allow_growth = True

        x = tf.placeholder(name='data',
                           shape=config.x_shape,
                           dtype=tf.float32)
        out = tf.reshape(tensor=x, shape=config.shape)

        self.feed_list = [x]
        self.fetch_list = [out]
        if backward:
            self.append_gradients(out, [x])


if __name__ == '__main__':
    # Run the reshape benchmark for both frameworks with default random feeds.
    test_main(PDReshape(), TFReshape(), feed_spec=None)