Example #1
    output_arg { name: 'b' type_attr: 'T' }
    attr { name: 'T' type: 'type' }
  }
  op {
    name: 'OpWithDefaultAttr'
    output_arg { name: 'a' type: DT_INT32 }
    attr { name: 'default_float' type: 'float' default_value { f: 123.0 } }
  }
  op {
    name: 'OpWithFutureDefaultAttr'
  }
""", _op_list)
op_def_registry.register_op_list(_op_list)
# NOTE(mrry): Dummy shape registrations for ops used in the tests.
for op_def in _op_list.op:
    tf.RegisterShape(op_def.name)(None)


class ImportGraphDefTest(tf.test.TestCase):
    def _MakeGraphDef(self,
                      text,
                      producer=tf.GRAPH_DEF_VERSION,
                      min_consumer=tf.GRAPH_DEF_VERSION_MIN_CONSUMER):
        text = "versions: { producer: %d min_consumer: %d };\n%s" % (
            producer, min_consumer, text)
        ret = tf.GraphDef()
        text_format.Merge(text, ret)
        return ret

    def testBasic(self):
        with tf.Graph().as_default():
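The snippet ends inside testBasic. As a minimal, hedged sketch (not the original test body), the following shows roughly how a GraphDef like the ones _MakeGraphDef builds is fed to tf.import_graph_def under the TF 0.x-era API used here; the node name 'A' is a hypothetical placeholder, while 'OpWithDefaultAttr' is one of the ops registered above.

from google.protobuf import text_format
import tensorflow as tf

# Build a tiny GraphDef in text form, the same way _MakeGraphDef does.
graph_def = tf.GraphDef()
text_format.Merge(
    "versions: { producer: %d min_consumer: %d } "
    "node { name: 'A' op: 'OpWithDefaultAttr' }"
    % (tf.GRAPH_DEF_VERSION, tf.GRAPH_DEF_VERSION_MIN_CONSUMER),
    graph_def)

with tf.Graph().as_default():
    # import_graph_def merges the serialized nodes into the current default
    # graph and can hand back specific ops via return_elements.
    a_op, = tf.import_graph_def(graph_def, return_elements=["A"])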
Example #2
from tensorflow.contrib.framework.python.ops import add_arg_scope
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import utils
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import variable_scope

import tensorflow as tf

cMod = tf.load_op_library('layer_norm_fused_op.so')

# Disable these registrations if you are using a newer version of TensorFlow.
# (You can keep them if no error is raised.)
tf.RegisterShape("LayerNormCustom")(common_shapes.call_cpp_shape_fn)
tf.RegisterShape("LayerNormBiasAddCustom")(common_shapes.call_cpp_shape_fn)
tf.RegisterShape("LayerNormFusedCustom")(common_shapes.call_cpp_shape_fn)


@ops.RegisterGradient("LayerNormCustom")
def _LayerNormCustomGrad(op, grad):
    return [
        cMod.layer_norm_backprop_custom(op.inputs[0], grad,
                                        op.get_attr("epsilon"))
    ]


@ops.RegisterGradient("LayerNormBiasAddCustom")
def _LayerNormBiasAddCustomGrad(op, grad):
    in_back, beta_back = cMod.layer_norm_bias_add_backprop_custom(
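The snippet is cut off inside _LayerNormBiasAddCustomGrad. As an illustrative, hedged usage sketch (not from the original), gradients registered by op type name are resolved automatically by tf.gradients; the wrapper name layer_norm_custom, the input shape, and the epsilon value below are assumptions inferred from the op name and the get_attr("epsilon") call above.

x = tf.placeholder(tf.float32, [None, 128])
# The generated Python wrapper name is assumed to follow the usual
# snake_case convention for the 'LayerNormCustom' op.
y = cMod.layer_norm_custom(x, epsilon=1e-6)
# tf.gradients looks up the gradient function registered for the op type
# 'LayerNormCustom', i.e. _LayerNormCustomGrad above.
dx, = tf.gradients(y, x)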