def _PopBackGrad(op, dlist, delement):
  """Gradient helper for a list pop: push the element gradient back on.

  When no upstream list gradient exists (`dlist is None`), an empty list is
  created whose element shape is queried from the forward op's output list.
  """
  if dlist is None:
    # Match the gradient list's element shape to the forward output's.
    element_shape = gen_list_ops.tensor_list_element_shape(
        op.outputs[0], shape_type=dtypes.int32)
    dlist = gen_list_ops.empty_tensor_list(
        element_dtype=delement.dtype, element_shape=element_shape)
  return gen_list_ops.tensor_list_push_back(dlist, delement)
def empty_tensor_list(element_shape, element_dtype, max_num_elements=None, name=None):
  """Return an empty TensorList.

  Args:
    element_shape: Shape of the elements the list will hold.
    element_dtype: Dtype of the elements the list will hold.
    max_num_elements: Optional size bound; `None` maps to -1 before the op
      call (presumably meaning "unbounded" — the op's contract is not visible
      here).
    name: Optional name for the op.

  Returns:
    The result of the underlying `empty_tensor_list` op.
  """
  return gen_list_ops.empty_tensor_list(
      element_shape=element_shape,
      element_dtype=element_dtype,
      max_num_elements=-1 if max_num_elements is None else max_num_elements,
      name=name)
def _TensorListFromTensor(op, dlist):
  """Stack the incoming list gradient back into a dense tensor.

  Falls back to an empty list (unconstrained element shape, -1) when no
  upstream list gradient is provided.
  """
  # The original branched on `shape[0] is not None`, but both arms yield
  # exactly `shape[0]` (it is passed through when set, and None otherwise),
  # so the branch collapses to a single assignment.
  num_elements = op.inputs[0].shape[0]
  if dlist is None:
    dlist = gen_list_ops.empty_tensor_list(
        element_dtype=op.inputs[0].dtype, element_shape=-1)
  return gen_list_ops.tensor_list_stack(
      dlist, element_dtype=op.inputs[0].dtype, num_elements=num_elements)
def empty_tensor_list(element_shape, element_dtype, max_num_elements=None, name=None):
  """Build an empty TensorList via the generated op.

  Args:
    element_shape: Shape of the list's elements.
    element_dtype: Dtype of the list's elements.
    max_num_elements: Optional capacity; substituted with -1 when `None`
      (the sentinel the underlying op expects for the default case).
    name: Optional op name.

  Returns:
    Whatever `gen_list_ops.empty_tensor_list` returns.
  """
  capacity = max_num_elements
  if capacity is None:
    capacity = -1
  return gen_list_ops.empty_tensor_list(
      element_shape=element_shape,
      element_dtype=element_dtype,
      max_num_elements=capacity,
      name=name)
def _TensorListFromTensor(op, dlist):
  """Convert the list gradient back to a stacked tensor.

  A missing upstream gradient (`dlist is None`) is replaced with an empty
  list whose element shape is left unconstrained (-1).
  """
  source = op.inputs[0]
  # `source.shape[0]` is forwarded as-is; the original's is-not-None branch
  # produced the identical value in both arms.
  if dlist is None:
    dlist = gen_list_ops.empty_tensor_list(
        element_dtype=source.dtype, element_shape=-1)
  return gen_list_ops.tensor_list_stack(
      dlist, element_dtype=source.dtype, num_elements=source.shape[0])
def _TensorListFromTensorGrad(op, dlist):
  """Gradient for TensorListFromTensor."""
  # Both arms of the original is-not-None check evaluate to shape[0] itself
  # (value when known, None when not), so a direct assignment is equivalent.
  num_elements = op.inputs[0].shape[0]
  if dlist is None:
    # Derive the element shape of the gradient list from the forward output.
    element_shape = gen_list_ops.tensor_list_element_shape(
        op.outputs[0], shape_type=dtypes.int32)
    dlist = gen_list_ops.empty_tensor_list(
        element_dtype=op.inputs[0].dtype, element_shape=element_shape)
  return gen_list_ops.tensor_list_stack(
      dlist, element_dtype=op.inputs[0].dtype, num_elements=num_elements)
def _TensorListFromTensorGrad(op, dlist):
  """Gradient for TensorListFromTensor."""
  forward_input = op.inputs[0]
  if dlist is None:
    # No incoming list gradient: start from an empty list whose element
    # shape is read off the forward op's output list.
    dlist = gen_list_ops.empty_tensor_list(
        element_dtype=forward_input.dtype,
        element_shape=gen_list_ops.tensor_list_element_shape(
            op.outputs[0], shape_type=dtypes.int32))
  # shape[0] passes through unchanged — the original's is-not-None branch
  # returned the same value on both paths.
  return gen_list_ops.tensor_list_stack(
      dlist,
      element_dtype=forward_input.dtype,
      num_elements=forward_input.shape[0])
def _PopBackGradient(unused_op, dlist, delement):
  """Accumulate the popped element's gradient back onto the list gradient."""
  if dlist is not None:
    return gen_list_ops.tensor_list_push_back(dlist, delement)
  # No upstream list gradient yet: push onto a fresh empty list with an
  # unconstrained (-1) element shape.
  empty = gen_list_ops.empty_tensor_list(
      element_dtype=delement.dtype, element_shape=-1)
  return gen_list_ops.tensor_list_push_back(empty, delement)
def _PopBackGradient(unused_op, dlist, delement):
  """Push `delement`'s gradient onto the (possibly missing) list gradient."""
  # Substitute an empty, shape-unconstrained list when no gradient flowed in.
  target = dlist if dlist is not None else gen_list_ops.empty_tensor_list(
      element_dtype=delement.dtype, element_shape=-1)
  return gen_list_ops.tensor_list_push_back(target, delement)