Example #1
 def _WeMustGoDeeper(self, msg):
   with self.assertRaisesOpError(msg):
     node_def = ops._NodeDef("op_type", "name")
     node_def_orig = ops._NodeDef("op_type_orig", "orig")
     op_orig = ops.Operation(node_def_orig, ops.get_default_graph())
     op = ops.Operation(node_def, ops.get_default_graph(), original_op=op_orig)
     raise errors.UnauthenticatedError(node_def, op, "true_err")
Example #2
    def testInputsAndOutputs(self):
        g = ops.Graph()
        op1 = ops.Operation(ops._NodeDef("noop", "myop1"), g, [],
                            [dtypes.float32])
        self.assertEqual(1, len(op1.values()))
        float1_t, = op1.values()

        op2 = ops.Operation(ops._NodeDef("reop", "myop2"), g, [],
                            [dtypes.float32, dtypes.string])
        self.assertEqual(2, len(op2.values()))
        float2_t, label2_str_t = op2.values()

        # Note that we consume label2_str_t twice here.
        op3 = ops.Operation(ops._NodeDef("add", "myop3"), g,
                            [float1_t, label2_str_t, label2_str_t],
                            [dtypes.float32, dtypes.int32])
        self.assertEqual(2, len(op3.values()))

        self.assertEqual(1, len(float1_t._consumers))
        self.assertEqual(op3, float1_t._consumers[0])

        self.assertEqual(0, len(float2_t._consumers))

        self.assertEqual(2, len(label2_str_t._consumers))
        self.assertEqual(op3, label2_str_t._consumers[0])
        self.assertEqual(op3, label2_str_t._consumers[1])

        self.assertProtoEquals(
            """
    op:'add' name:'myop3'
    input:'myop1' input:'myop2:1' input:'myop2:1'
    """, op3.node_def)
Example #3
 def testInvalidNames(self):
     g = ops.Graph()
     with self.assertRaises(ValueError):
         ops.Operation(ops._NodeDef("op", ""), g)
     with self.assertRaises(ValueError):
         ops.Operation(ops._NodeDef("op", "_invalid"), g)
     with self.assertRaises(ValueError):
         ops.Operation(ops._NodeDef("op", "-invalid"), g)
     with self.assertRaises(ValueError):
         ops.Operation(ops._NodeDef("op", "/invalid"), g)
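The naming rules exercised in Example #3 also surface through the public graph-building API: op names may not begin with "_", "-", or "/". A minimal hedged sketch, assuming TF 1.x graph mode (tf.compat.v1 under TF 2.x):

import tensorflow.compat.v1 as tf

tf.disable_eager_execution()

g = tf.Graph()
with g.as_default():
    tf.constant(1.0, name="valid.name-1")          # accepted
    for bad_name in ("_invalid", "-invalid", "/invalid"):
        try:
            tf.constant(1.0, name=bad_name)
        except ValueError:
            print("rejected:", bad_name)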
Example #4
 def testDeviceObject(self):
     op = ops.Operation(ops._NodeDef("noop", "myop"), ops.Graph(), [], [])
     op._set_device("/job:goo/device:GPU:0")
     self.assertProtoEquals(
         "op:'noop' name:'myop' device:'/job:goo/device:GPU:0' ",
         op.node_def)
     op = ops.Operation(ops._NodeDef("noop", "op2"), ops.Graph(), [], [])
     op._set_device(
         pydev.Device(job="muu", device_type="CPU", device_index=0))
     self.assertProtoEquals(
         "op:'noop' name:'op2' device:'/job:muu/device:CPU:0'", op.node_def)
Example #5
 def testReferenceInput(self):
     g = ops.Graph()
     op1 = ops.Operation(ops._NodeDef("noop", "op1"), g, [],
                         [dtypes.float32_ref, dtypes.float32])
     self.assertProtoEquals("op:'noop' name:'op1'", op1.node_def)
     ref_t, nonref_t = op1.values()
     # NOTE(mrry): Must specify input_types to preserve ref-typed input.
     op2 = ops.Operation(ops._NodeDef("refop", "op2"),
                         g, [ref_t, nonref_t], [],
                         input_types=[dtypes.float32_ref, dtypes.float32])
     self.assertProtoEquals(
         "op:'refop' name:'op2' input:'op1' input:'op1:1'", op2.node_def)
     op3 = ops.Operation(ops._NodeDef("nonrefop", "op3"), g,
                         [ref_t, nonref_t], [])
     self.assertProtoEquals(
         "op:'nonrefop' name:'op3' input:'op1' input:'op1:1'", op3.node_def)
Example #6
 def testShape(self):
     op = ops.Operation(ops._NodeDef("noop", "myop"), ops.Graph(), [],
                        [dtypes.float32])
     t = op.outputs[0]
     self.assertEqual(tensor_shape.unknown_shape(), t.get_shape())
     t.set_shape([1, 2, 3])
     self.assertEqual([1, 2, 3], t.get_shape())
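The unknown-shape-then-set_shape flow from Example #6, replayed through the public placeholder API (a small sketch, assuming TF 1.x graph mode):

import tensorflow.compat.v1 as tf

tf.disable_eager_execution()

x = tf.placeholder(tf.float32)   # no shape given, so the static shape is unknown
print(x.get_shape())             # <unknown>
x.set_shape([1, 2, 3])           # refine the static shape in place
print(x.get_shape())             # (1, 2, 3)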
Example #7
    def testNoOutputs(self):
        g = ops.Graph()
        op1 = ops.Operation(ops._NodeDef("noop", "myop1"), g, [],
                            [dtypes.float32])
        float_t, = op1.values()
        op2 = ops.Operation(ops._NodeDef("reop", "myop2"), g, [float_t], [])
        self.assertEqual(0, len(op2.values()))
        self.assertEqual(1, len(op2.inputs))
        self.assertIs(float_t, op2.inputs[0])

        self.assertEqual(1, len(float_t._consumers))
        self.assertEqual(op2, float_t._consumers[0])

        self.assertProtoEquals("op:'noop' name:'myop1'", op1.node_def)
        self.assertProtoEquals("op:'reop' name:'myop2' input:'myop1'",
                               op2.node_def)
Example #8
 def testNoShapeFunction(self):
     g = ops.Graph()
     op = ops.Operation(ops._NodeDef("op", "an_op"),
                        g,
                        output_types=[dtypes.float32])
     self.assertEqual(
         tensor_shape.unknown_shape(),
         _apply_op(g, "an_op", [], [dtypes.float32]).get_shape())
Example #9
def copy_op_handler(info, op, copy_shape=True):
    """Copy a `tf.Operation`.

  Args:
    info: Transform._Info instance.
    op: the `tf.Operation` to be copied.
    copy_shape: also copy the shape of the tensor
  Returns:
    A copy of op.
  """
    # pylint: disable=protected-access

    # Transform control inputs:
    control_inputs_ = [
        info.transformer.transform_control_input_handler(info, ci)
        for ci in op.control_inputs
    ]
    control_inputs_ = [ci for ci in control_inputs_ if ci is not None]

    # Transform the original op (if any):
    original_op_ = info.transformer.transform_original_op_handler(
        info, op._original_op)

    # Transform inputs:
    inputs_ = [info.transformer._transform_t(t) for t in op.inputs]

    # Clone the node def:
    node_def_ = deepcopy(op._node_def)

    # Transform name:
    name_ = info.transformer.new_name(op.name)
    name_ = info.graph_.unique_name(name_)
    node_def_.name = name_

    # Copy the other inputs needed for initialization
    output_types_ = op._output_types[:]
    input_types_ = op._input_types[:]

    # Make a copy of the op_def too.
    # It's unique to every _type_ of Operation.
    op_def_ = deepcopy(op._op_def)

    # Initialize a new Operation instance
    op_ = tf_ops.Operation(node_def_, info.graph_, inputs_, output_types_,
                           control_inputs_, input_types_, original_op_,
                           op_def_)

    # copy the shape over
    if copy_shape:
        for t, t_ in zip(op.outputs, op_.outputs):
            t_.set_shape(t.get_shape())

    # Add op to the graph
    info.graph_._add_op(op_)

    # pylint: enable=protected-access
    return op_
Example #10
def copy_op_handler(info, op, new_inputs, copy_shape=True):
    """Copy a `tf.Operation`.

  Args:
    info: Transform._TmpInfo instance.
    op: the `tf.Operation` to be copied.
    new_inputs: The new inputs for this op.
    copy_shape: also copy the shape of the tensor
  Returns:
    A `(op, op_outputs)` tuple containing the transformed op and its outputs.
  """
    # The `new_inputs` argument was added to this function. For compatibility
    # reasons, raise an error if `new_inputs` is a boolean (e.g. a `copy_shape`
    # value passed positionally under the old signature).
    if isinstance(new_inputs, bool):
        raise TypeError("the `new_inputs` argument must be an iterable.")

    # pylint: disable=protected-access

    # Clone the node def:
    node_def_ = deepcopy(op.node_def)

    # Transform name:
    name_ = info.new_name(op.name)
    name_ = info.graph_.unique_name(name_)
    node_def_.name = name_

    # Copy the other inputs needed for initialization
    output_types_ = op._output_types[:]
    input_types_ = op._input_types[:]

    # Make a copy of the op_def too.
    # Its unique to every _type_ of Operation.
    op_def_ = deepcopy(op.op_def)

    # Initialize a new Operation instance
    op_ = tf_ops.Operation(node_def_, info.graph_, new_inputs, output_types_,
                           [], input_types_, None, op_def_)

    # copy the shape over
    if copy_shape:
        for t, t_ in zip(op.outputs, op_.outputs):
            t_.set_shape(t.get_shape())

    # Original op cannot be finalised here yet. Because some ops require this
    # attribute to exist, we will create a dummy original_op first and then
    # later finalise it with the actual original_op when all the ops have
    # been copied.
    # TODO(fkp): Stop worrying about _original_op and remove this code?
    if op._original_op:
        op_._original_op = op._original_op

    # Add op to the graph
    info.graph_._add_op(op_)

    return op_, op_.outputs
Example #11
    def _deepcopy_op_helper(self, src_op, new_name, new_inputs,
                            new_ctrl_inputs, new_original_op, to_graph):
        """
        deepcopy op helper function

        :param src_op: source op instance.
        :param new_name: new op name.
        :param new_inputs: new op inputs.
        :param new_ctrl_inputs: new op control inputs.
        :param new_original_op: new original op, used to associate the new `Operation` with an
        existing `Operation` (for example, a replica with the op that was replicated).
        :param to_graph: dest graph
        :return: the deep copy op instance
        """

        # Make a new node_def based on that of the original.
        # An instance of tensorflow.core.framework.graph_pb2.NodeDef,
        # it stores String-based info such as name, device and type of the op.
        # Unique to every Operation instance.
        new_node_def = deepcopy(src_op.node_def)

        # Change the name
        new_node_def.name = new_name

        # Copy the other inputs needed for initialization
        output_types = src_op._output_types[:]
        input_types = src_op._input_types[:]

        # If the new input is an MPC compare tensor, the input type must become
        # tf.float64, so create a cast op whose src dtype == dest dtype == float64.
        if self._is_cast_op(src_op) and is_mpc_compare_tensor(new_inputs[0]):
            new_op = mpc_cast(new_inputs[0], tf.float64, name=new_name).op
            self.op_info[new_name] = new_op
            return new_op

        # Make a copy of the op_def too.
        # It's unique to every _type_ of Operation.
        op_def = deepcopy(src_op.op_def)

        # Initialize a new Operation instance
        new_op = ops.Operation(new_node_def,
                            to_graph,
                            new_inputs,
                            output_types,
                            new_ctrl_inputs,
                            input_types,
                            new_original_op,
                            op_def)

        # Save the op info
        self.op_info[new_name] = new_op

        return new_op
Example #12
    def _deepcopy_op_helper(self, src_op, new_name, new_inputs,
                            new_ctrl_inputs, new_original_op, to_graph):
        """
        deepcopy op helper function

        :param src_op: source op instance.
        :param new_name: new op name.
        :param new_inputs: new op inputs.
        :param new_ctrl_inputs: new op control inputs.
        :param new_original_op: new original op, used to associate the new `Operation` with an
        existing `Operation` (for example, a replica with the op that was replicated).
        :param to_graph: dest graph
        :return: the deep copy op instance
        """

        # If an op with new_name already exists, find and return it
        if new_name in self.dc_op_info:
            return self.dc_op_info[new_name]

        # Make a new node_def based on that of the original.
        # An instance of tensorflow.core.framework.graph_pb2.NodeDef,
        # it stores String-based info such as name, device and type of the op.
        # Unique to every Operation instance.
        new_node_def = deepcopy(src_op.node_def)

        # Change the name
        new_node_def.name = new_name

        # Copy the other inputs needed for initialization
        output_types = src_op._output_types[:]
        input_types = src_op._input_types[:]

        # Make a copy of the op_def too.
        # Its unique to every _type_ of Operation.
        op_def = deepcopy(src_op.op_def)

        # Initialize a new Operation instance
        try:
            new_op = ops.Operation(new_node_def,
                                to_graph,
                                new_inputs,
                                output_types,
                                new_ctrl_inputs,
                                input_types,
                                new_original_op,
                                op_def)
        except Exception as e:
            rtt_get_logger().error(str(e))
            # Re-raise so `new_op` is never referenced while unbound below.
            raise

        # Save the op info
        self.dc_op_info[new_name] = new_op

        return new_op
Example #13
def copy_op_handler(info, op, copy_shape=True):
    """Copy a `tf.Operation`.

  Args:
    info: Transform._TmpInfo instance.
    op: the `tf.Operation` to be copied.
    copy_shape: also copy the shape of the tensor
  Returns:
    A `(op, op_outputs)` tuple containing the transformed op and its outputs.
  """
    # pylint: disable=protected-access

    # Clone the node def:
    node_def_ = deepcopy(op._node_def)

    # Transform name:
    name_ = info.new_name(op.name)
    name_ = info.graph_.unique_name(name_)
    node_def_.name = name_

    # Copy the other inputs needed for initialization
    output_types_ = op._output_types[:]
    input_types_ = op._input_types[:]

    # Make a copy of the op_def too.
    # Its unique to every _type_ of Operation.
    op_def_ = deepcopy(op._op_def)

    # Initialize a new Operation instance
    op_ = tf_ops.Operation(node_def_, info.graph_, [], output_types_, [],
                           input_types_, None, op_def_)

    # copy the shape over
    if copy_shape:
        for t, t_ in zip(op.outputs, op_.outputs):
            t_.set_shape(t.get_shape())

    # Finalize original op.
    if op._original_op:
        original_op = info.transform_original_op_handler(info, op._original_op)
        if original_op is None:
            logging.info("Could not find original op of: %s", op_.name)
        else:
            op_._original_op = original_op

    # Add op to the graph
    info.graph_._add_op(op_)

    return op_, op_.outputs
Example #14
def copy_op_handler(info, op, copy_shape=True):
    """Copy a `tf.Operation`.

  Args:
    info: Transform._TmpInfo instance.
    op: the `tf.Operation` to be copied.
    copy_shape: also copy the shape of the tensor
  Returns:
    A `(op, op_outputs)` tuple containing the transformed op and its outputs.
  """
    # pylint: disable=protected-access

    # Clone the node def:
    node_def_ = deepcopy(op._node_def)

    # Transform name:
    name_ = info.new_name(op.name)
    name_ = info.graph_.unique_name(name_)
    node_def_.name = name_

    # Copy the other inputs needed for initialization
    output_types_ = op._output_types[:]
    input_types_ = op._input_types[:]

    # Make a copy of the op_def too.
    # Its unique to every _type_ of Operation.
    op_def_ = deepcopy(op._op_def)

    # Initialize a new Operation instance
    op_ = tf_ops.Operation(node_def_, info.graph_, [], output_types_, [],
                           input_types_, None, op_def_)

    # copy the shape over
    if copy_shape:
        for t, t_ in zip(op.outputs, op_.outputs):
            t_.set_shape(t.get_shape())

    # Original op cannot be finalised here yet. Because some ops require this
    # attribute to exist, we will create a dummy original_op first and then
    # later finalise it with the actual original_op when all the ops have
    # been copied.
    if op._original_op:
        op_._original_op = op._original_op

    # Add op to the graph
    info.graph_._add_op(op_)

    return op_, op_.outputs
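Examples #10, #13 and #14 are variants of the default op-copy handler from tf.contrib.graph_editor. If memory serves, such a handler is plugged into the graph editor's Transformer via its transform_op_handler attribute; the sketch below shows that wiring and should be read as an assumption about the TF 1.x contrib API (attribute name, call signature and return value), not a verified recipe.

import tensorflow as tf                                # TF 1.x only (contrib)
from tensorflow.contrib import graph_editor as ge

g_src, g_dst = tf.Graph(), tf.Graph()
with g_src.as_default():
    a = tf.constant(1.0, name="a")
    b = tf.add(a, a, name="b")

transformer = ge.Transformer()
transformer.transform_op_handler = copy_op_handler    # the handler defined above
sgv = ge.sgv(g_src.get_operations())                  # subgraph view over all ops
copied_sgv, info = transformer(sgv, g_dst, dst_scope="copy")
print([op.name for op in g_dst.get_operations()])     # e.g. ['copy/a', 'copy/b']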
Example #15
def copy_op_handler(info, op):
    """Copy a tf.Operation.

  Args:
    info: Transform._Info instance.
    op: the tf.Operation to be copied.
  Returns:
    A copy of op.
  """
    # pylint: disable=protected-access

    # If it has control inputs, call this function recursively on each.
    control_inputs_ = [
        info.transformer._transform_op(control_input)
        for control_input in op.control_inputs
    ]

    # If it has an original_op parameter, copy it
    original_op_ = info.transformer.transform_original_op_hanlder(
        info, op._original_op)

    # If it has inputs, call this function recursively on each.
    inputs_ = [info.transformer._transform_t(t) for t in op.inputs]

    node_def_ = deepcopy(op._node_def)
    name_ = info.transformer.new_name(op.name)
    name_ = info.graph_.unique_name(name_)
    node_def_.name = name_

    # Copy the other inputs needed for initialization
    output_types_ = op._output_types[:]
    input_types_ = op._input_types[:]

    # Make a copy of the op_def too.
    # Its unique to every _type_ of Operation.
    op_def_ = deepcopy(op._op_def)

    # Initialize a new Operation instance
    op_ = tf_ops.Operation(node_def_, info.graph_, inputs_, output_types_,
                           control_inputs_, input_types_, original_op_,
                           op_def_)
    info.graph_._add_op(op_)

    # pylint: enable=protected-access
    return op_
Example #16
    def testNoInputs(self):
        op = ops.Operation(ops._NodeDef("noop", "myop"), ops.Graph(), [],
                           [dtypes.float32, dtypes.string])
        self.assertEqual(2, len(op.values()))
        self.assertEqual(0, len(op.inputs))
        self.assertEqual("myop", op.name)

        float_t, label_str_t = op.values()
        self.assertEqual(dtypes.float32, float_t.dtype)
        self.assertEqual(op, float_t.op)
        self.assertEqual(0, float_t._value_index)
        self.assertEqual(0, len(float_t._consumers))
        self.assertEqual("myop", float_t._as_node_def_input())

        self.assertEqual(dtypes.string, label_str_t.dtype)
        self.assertEqual(op, label_str_t.op)
        self.assertEqual(1, label_str_t._value_index)
        self.assertEqual(0, len(label_str_t._consumers))
        self.assertEqual("myop:1", label_str_t._as_node_def_input())

        self.assertProtoEquals("op:'noop' name:'myop'", op.node_def)
Example #17
def copy_op_to_graph(org_instance, to_graph, variables, scope=''):
    """Returns a copy of an operation from another Graph under a specified scope.

  Given an `Operation` `org_instance` from one `Graph`,
  initializes and returns a copy of it from another `Graph`,
  under the specified scope (default `""`).

  The copying is done recursively, so any `Operation` whose output
  is required to evaluate the `org_instance`, is also copied (unless
  already done).

  Since `Variable` instances are copied separately, those required
  to evaluate `org_instance` must be provided as input.

  Args:
    org_instance: An `Operation` from some `Graph`. Could be a
      `Placeholder` as well.
    to_graph: The `Graph` to copy `org_instance` to.
    variables: An iterable of the `Variable` instances required to evaluate
      `org_instance` (these are copied separately).
    scope: A scope for the new `Variable` (default `""`).

  Returns:
    The copied `Operation` from `to_graph`.

  Raises:
    TypeError: If `org_instance` is not an `Operation` or `Tensor`.
  """

    #The name of the new instance
    if scope != '':
        new_name = scope + '/' + org_instance.name
    else:
        new_name = org_instance.name
    # print(new_name)
    #Extract names of variables
    copied_variables = dict((x.name, x) for x in variables)
    #If a variable by the new name already exists, return the
    #corresponding tensor that will act as an input
    if new_name in copied_variables:
        return to_graph.get_tensor_by_name(copied_variables[new_name].name)
    #If an instance of the same name exists, return appropriately
    try:
        already_present = to_graph.as_graph_element(new_name,
                                                    allow_tensor=True,
                                                    allow_operation=True)
        return already_present
    except:
        pass
    #Get the collections that the new instance needs to be added to.
    #The new collections will also be a part of the given scope.
    collections = []
    for name, collection in org_instance.graph._collections.items():
        if org_instance in collection:
            if scope == '':
                collections.append(name)
            else:
                collections.append(scope + '/' + name)
    #Take action based on the class of the instance

    if isinstance(org_instance, ops.Tensor):
        #If its a Tensor, it is one of the outputs of the underlying
        #op. Therefore, copy the op itself and return the appropriate
        #output.
        op = org_instance.op
        new_op = copy_op_to_graph(op, to_graph, variables, scope)
        output_index = op.outputs.index(org_instance)
        new_tensor = new_op.outputs[output_index]
        #Add to collections if any
        for collection in collections:
            to_graph.add_to_collection(collection, new_tensor)

        return new_tensor

    elif isinstance(org_instance, ops.Operation):
        op = org_instance

        #If it has an original_op parameter, copy it
        if op._original_op is not None:
            new_original_op = copy_op_to_graph(op._original_op, to_graph,
                                               variables, scope)
        else:
            new_original_op = None

        #If it has control inputs, call this function recursively on each.
        new_control_inputs = [
            copy_op_to_graph(x, to_graph, variables, scope)
            for x in op.control_inputs
        ]

        #If it has inputs, call this function recursively on each.
        new_inputs = [
            copy_op_to_graph(x, to_graph, variables, scope) for x in op.inputs
        ]

        #Make a new node_def based on that of the original.
        #An instance of tensorflow.core.framework.node_def_pb2.NodeDef, it
        #stores String-based info such as name, device and type of the op.
        #Unique to every Operation instance.
        #Colocate info needs to be cleared here
        new_attr = dict()
        for key in op.node_def.attr:
            # don't copy colocation ('_class') info
            if key != '_class':
                new_attr[key] = op.node_def.attr[key]

        new_node_def = tf.NodeDef(name=new_name,
                                  op=op.node_def.op,
                                  input=op.node_def.input,
                                  device=op.node_def.device,
                                  attr=new_attr)

        #Copy the other inputs needed for initialization
        output_types = op._output_types[:]
        input_types = op._input_types[:]

        #Make a copy of the op_def too.
        #It's unique to every _type_ of Operation.
        op_def = deepcopy(op.op_def)

        #Initialize a new Operation instance
        new_op = ops.Operation(new_node_def, to_graph, new_inputs,
                               output_types, new_control_inputs, input_types,
                               new_original_op, op_def)
        #Use Graph's hidden methods to add the op
        to_graph._add_op(new_op)  # pylint: disable=protected-access
        to_graph._record_op_seen_by_control_dependencies(new_op)
        for device_function in reversed(to_graph._device_function_stack):
            new_op._set_device(device_function(new_op))

        return new_op

    else:
        raise TypeError('Could not copy instance: ' + str(org_instance))
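A short usage sketch for the copy_op_to_graph helper above (it mirrors tf.contrib.copy_graph.copy_op_to_graph). The graph contents and scope name are illustrative, and the helper leans on private TF 1.x internals (tf.NodeDef, _device_function_stack, _add_op), so this assumes the same TF 1.x version the snippet was written against:

import tensorflow.compat.v1 as tf

tf.disable_eager_execution()

g1, g2 = tf.Graph(), tf.Graph()
with g1.as_default():
    a = tf.constant(3.0, name="a")
    b = tf.constant(4.0, name="b")
    c = tf.add(a, b, name="c")

# No Variables feed into `c`, so an empty variables list is passed.
copied = copy_op_to_graph(c, g2, [], scope="copied")
print(copied.name)                                # copied/c:0 (Tensor in, Tensor out)
print([op.name for op in g2.get_operations()])    # ['copied/a', 'copied/b', 'copied/c']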
Example #18
def copy_to_graph(org_instance, to_graph, copied_variables={}, namespace=""):
    """
    Makes a copy of the Operation/Tensor instance 'org_instance'
    for the graph 'to_graph', recursively. Therefore, all required
    structures linked to org_instance will be automatically copied.
    'copied_variables' should be a dict mapping pertinent copied variable
    names to the copied instances.
     
    The new instances are automatically inserted into the given 'namespace'.
    If namespace='', it is inserted into the graph's global namespace.
    However, to avoid naming conflicts, it's better to provide a namespace.
    If the instance(s) happen to be part of collection(s), they are
    added to the appropriate collections in to_graph as well.
    For example, for collection 'C' which the instance happens to be a
    part of, given a namespace 'N', the new instance will be a part of
    'N/C' in to_graph.
 
    Returns the corresponding instance with respect to to_graph.
 
    TODO: Order of insertion into collections is not preserved
    """

    #The name of the new instance
    if namespace != '':
        new_name = namespace + '/' + org_instance.name
    else:
        new_name = org_instance.name

    #If a variable by the new name already exists, return the
    #corresponding tensor that will act as an input
    if new_name in copied_variables:
        return to_graph.get_tensor_by_name(copied_variables[new_name].name)

    #If an instance of the same name exists, return appropriately
    try:
        already_present = to_graph.as_graph_element(new_name,
                                                    allow_tensor=True,
                                                    allow_operation=True)
        return already_present
    except:
        pass

    #Get the collections that the new instance needs to be added to.
    #The new collections will also be a part of the given namespace.
    collections = []
    for name, collection in org_instance.graph._collections.items():
        if org_instance in collection:
            if namespace == '':
                collections.append(name)
            else:
                collections.append(namespace + '/' + name)

    #Take action based on the class of the instance

    #if isinstance(org_instance, tf.python.framework.ops.Tensor):
    if isinstance(org_instance, ops.Tensor):

        #If its a Tensor, it is one of the outputs of the underlying
        #op. Therefore, copy the op itself and return the appropriate
        #output.
        op = org_instance.op
        new_op = copy_to_graph(op, to_graph, copied_variables, namespace)
        output_index = op.outputs.index(org_instance)
        new_tensor = new_op.outputs[output_index]
        #Add to collections if any
        for collection in collections:
            to_graph.add_to_collection(collection, new_tensor)

        return new_tensor

    #elif isinstance(org_instance, tf.python.framework.ops.Operation):
    elif isinstance(org_instance, ops.Operation):

        op = org_instance

        #If it has an original_op parameter, copy it
        if op._original_op is not None:
            new_original_op = copy_to_graph(op._original_op, to_graph,
                                            copied_variables, namespace)
        else:
            new_original_op = None

        #If it has control inputs, call this function recursively on each.
        new_control_inputs = [
            copy_to_graph(x, to_graph, copied_variables, namespace)
            for x in op.control_inputs
        ]

        #If it has inputs, call this function recursively on each.
        new_inputs = [
            copy_to_graph(x, to_graph, copied_variables, namespace)
            for x in op.inputs
        ]

        #Make a new node_def based on that of the original.
        #An instance of tensorflow.core.framework.graph_pb2.NodeDef, it
        #stores String-based info such as name, device and type of the op.
        #Unique to every Operation instance.
        new_node_def = deepcopy(op._node_def)
        #Change the name
        new_node_def.name = new_name

        #Copy the other inputs needed for initialization
        output_types = op._output_types[:]
        input_types = op._input_types[:]

        #Make a copy of the op_def too.
        #It's unique to every _type_ of Operation.
        op_def = deepcopy(op._op_def)

        #Initialize a new Operation instance
        #new_op = tf.python.framework.ops.Operation(new_node_def,
        new_op = ops.Operation(new_node_def, to_graph, new_inputs,
                               output_types, new_control_inputs, input_types,
                               new_original_op, op_def)
        #Use Graph's hidden methods to add the op
        to_graph._add_op(new_op)
        to_graph._record_op_seen_by_control_dependencies(new_op)
        #pdb.set_trace()
        for device_function in reversed(to_graph._device_function_stack):
            if device_function is not None:
                new_op._set_device(device_function(new_op))

        return new_op

    else:
        raise TypeError("Could not copy instance: " + str(org_instance))