Example 1
0
def reshape(x, shape, name=None):
    """
    Reinterpret input samples as having different tensor dimensions.
    One dimension may be specified as 0 and will be inferred.

    The output tensor has the same shape as 'shape'.

    The backward pass propagates the received gradient for the output
    shape back to the input shape.

    Examples:
        >>> C.eval(C.reshape([[0,1],[2,3],[4,5]], (2,3)))
        [array([[[ 0.,  4.,  3.],
                 [ 2.,  1.,  5.]]])]

    Args:
        x: tensor to be reshaped
        shape: a tuple defining the resulting shape
        name: the name of the node in the network
    Returns:
        :class:`cntk.graph.ComputationNode`
    """
    from cntk.ops.cntk1 import NewReshape
    if np.isscalar(shape):
        target_shape = shape
    else:
        # CNTK stores tensors column major, so the requested shape is flipped.
        target_shape = tuple(reversed(shape))
    node = NewReshape(x, target_shape, 0, 0, name=name)
    wrap_numpy_arrays(node)
    node.rank = get_rank(target_shape)
    return node
Example 2
0
def reshape(x, shape, name=None):
    """
    Reinterpret input samples as having different tensor dimensions.
    One dimension may be specified as 0 and will be inferred.

    The output tensor has the same shape as 'shape'.

    The backward pass propagates the received gradient for the output
    shape back to the input shape.

    Examples:
        >>> C.eval(C.reshape([[0,1],[2,3],[4,5]], (2,3)))
        [array([[[ 0.,  4.,  3.],
                 [ 2.,  1.,  5.]]])]

    Args:
        x: tensor to be reshaped
        shape (tuple): a tuple defining the resulting shape
        name (str): the name of the node in the network
    Returns:
        :class:`cntk.graph.ComputationNode`
    """
    from cntk.ops.cntk1 import NewReshape
    # Non-scalar shapes are reversed because CNTK uses column-major layout.
    cntk_shape = shape if np.isscalar(shape) else tuple(reversed(shape))
    result = NewReshape(x, cntk_shape, 0, 0, name=name)
    wrap_numpy_arrays(result)
    result.rank = get_rank(cntk_shape)
    return result
Example 3
0
def reshape(x, shape, name=None):
    """
    Reinterpret input samples as having different tensor dimensions.
    One dimension may be specified as 0 and will be inferred.

    The output tensor has the same shape as 'shape'.

    The backward pass propagates the received gradient for the output
    shape back to the input shape.

    Examples:
        >>> C.eval(C.reshape([[0,1],[2,3],[4,5]], (2,3)))
        [array([[[ 0.,  4.,  3.],
                 [ 2.,  1.,  5.]]])]

    Args:
        x: tensor to be reshaped
        shape: a tuple defining the resulting shape
        name: the name of the node in the network
    Returns:
        :class:`cntk.graph.ComputationNode`
    """
    from cntk.ops.cntk1 import NewReshape
    # Delegate directly to the underlying graph node; shape is passed through
    # unchanged here (unlike variants that pre-reverse it for column-major order).
    node = NewReshape(x, shape, 0, 0, name=name)
    return node