# Example 1
def Linear(shape, _inf, bias=True, init=_default_initializer, init_bias=0, input_rank=None, map_rank=None):
    """Create a fully-connected (linear) layer as a composable Function.

    Args:
        shape: output dimension(s); scalars are promoted to a 1-tuple.
        _inf: shape-inference object carrying the input shape (``_inf.shape``).
        bias: if True, add a learnable bias term ``b``.
        init: initializer for the weight matrix ``W``.
        init_bias: initial value for the bias ``b``.
        input_rank: number of input axes to consume (not yet implemented).
        map_rank: number of leading axes to map over (not yet implemented).

    Returns:
        A Function computing ``x @ W + b`` (or ``x @ W`` when ``bias`` is False).

    Raises:
        ValueError: if both ``input_rank`` and ``map_rank`` are given — the
            BrainScript reference semantics declare them mutually exclusive.
    """
    # The BrainScript original fails when both ranks are specified; enforce that
    # here instead of silently ignoring the arguments.
    if input_rank is not None and map_rank is not None:
        raise ValueError("'input_rank' and 'map_rank' cannot be specified at the same time.")

    # TODO: implement the full BrainScript semantics of input_rank/map_rank:
    #   inputShape = Inferred if input_rank is None else Repeat(input_rank, Inferred)
    #   inferInputRankToMap = -1 if input_rank is not None
    #                         else 0 if map_rank is None else map_rank
    #   apply(x) = Times(W, x, outputRank=..., inferInputRankToMap=...) [+ b]
    out_shape = _as_tuple(shape)

    W = Parameter(_inf.shape + out_shape, init=init     , name='W')
    b = Parameter(             out_shape, init=init_bias, name='b') if bias else None
    x = Placeholder(_inf=_inf, name='linear_arg')
    apply_x = Function.__matmul__(x, W) + b if bias else \
              Function.__matmul__(x, W)
    _name_and_extend_Function(apply_x, 'Linear')
    return apply_x
# Example 2
def Embedding(shape, _inf, weights=None, init=_default_initializer, transpose=False):
    """Create an embedding layer as a composable Function.

    Args:
        shape: embedding dimension(s); scalars are promoted to a 1-tuple.
        _inf: shape-inference object carrying the input shape (``_inf.shape``).
        weights: if given, use these values as a fixed (non-learned) embedding
            table; otherwise a learnable Parameter is created.
        init: initializer for the learnable embedding table.
        transpose: if True, multiply as ``E @ x`` with the table laid out
            transposed; otherwise ``x @ E``.

    Returns:
        A Function mapping (typically sparse one-hot) input to its embedding.
    """
    embed_dims = _as_tuple(shape)
    # The table layout depends on which side of the product the input sits on.
    if transpose:
        full_shape = embed_dims + _inf.shape
    else:
        full_shape = _inf.shape + embed_dims

    if weights is not None:
        # Caller supplied a fixed embedding; keep it constant.
        UntestedBranchError("Embedding, from constant")
        E = Constant(full_shape, init=weights, name='E')  # TODO: can 'weights' be a CNTK object already? Then how to do this?
    else:
        # No weights given: learn the embedding table.
        E = Parameter(full_shape, init=init, name='E')

    x = Placeholder(_inf=_inf, name='embedding_arg')
    if transpose:
        apply_x = Function.__matmul__(E, x)
    else:
        apply_x = Function.__matmul__(x, E)  # x is expected to be sparse one-hot
    _name_and_extend_Function(apply_x, 'Embedding')
    return apply_x