Code example #1
0
File: sequence.py — Project: delpart/CNTK
def _santize_step_function(f):
    """Validate and convert a plain Python step function into a CNTK Function.

    Inputs that are not plain Python functions (e.g. an already-built CNTK
    Function) are returned unchanged.  A Python function is converted via
    ``Function._to_Function_unchecked``; if the converted graph exposes more
    arguments than the Python signature declared, the function body referenced
    CNTK variables or placeholders directly, which is not supported — a
    TypeError describing the extra arguments is raised.
    """
    import types
    from cntk.internal.utils import get_python_function_arguments

    # Pass through anything that is not a plain Python function.
    if not isinstance(f, types.FunctionType):
        return f

    python_arg_names, _ = get_python_function_arguments(f)
    try:
        converted, converted_args = Function._to_Function_unchecked(f)
        if len(converted.arguments) > len(python_arg_names):
            # The converted graph picked up variables the Python signature
            # never declared — report them and bail out.
            converted_arg_names = [v.name for v in converted.arguments]
            extra_args = set(converted_arg_names) - set(python_arg_names)
            raise TypeError(('Recurrence Python step function makes use of additional CNTK variables or placeholders: {}. '
                         'Your step function arguments in Python code are: {}, '
                         'while the converted CNTK function argument are: {}. '
                         'This is currently not a supported Python step function definition. '
                         'Note that the current supported Python step function signature is: '
                         'step_function(prev_state_1, prev_state_2, ..., prev_state_n, sequence_input_x) -> next_state_1, next_state_2, ..., next_state_n '
                         'in which no references to any CNTK variables or placeholders are allowed.'
                         ).format(extra_args, python_arg_names, converted_arg_names))
        f = Function._sanitize_check_Function(converted, converted_args, f)
    except TypeError as e:
        # Rewrite the low-level parameter-creation error into a
        # Recurrence-specific message; every other TypeError (including the
        # one raised just above) propagates unchanged.
        if str(e) == 'parameters cannot be created inside a @Function def':
            raise TypeError('Parameter cannot be created inside Recurrence Python step function.')
        raise

    return f
Code example #2
0
def _santize_step_function(f):
    import types
    from cntk.internal.utils import get_python_function_arguments
    if isinstance(f, types.FunctionType):
        py_args, _ = get_python_function_arguments(f)
        try:
            cntk_f, cntk_args = Function._to_Function_unchecked(f)
            if len(cntk_f.arguments) > len(py_args):
                cntk_args = [v.name for v in cntk_f.arguments]
                additional_cntk_args = set(cntk_args) - set(py_args)
                raise TypeError((
                    'Recurrence Python step function makes use of additional CNTK variables or placeholders: {}. '
                    'Your step function arguments in Python code are: {}, '
                    'while the converted CNTK function argument are: {}. '
                    'This is currently not a supported Python step function definition. '
                    'Note that the current supported Python step function signature is: '
                    'step_function(prev_state_1, prev_state_2, ..., prev_state_n, sequence_input_x) -> next_state_1, next_state_2, ..., next_state_n '
                    'in which no references to any CNTK variables or placeholders are allowed.'
                ).format(additional_cntk_args, py_args, cntk_args))
            f = Function._sanitize_check_Function(cntk_f, cntk_args, f)
        except TypeError as e:
            if str(e) != 'parameters cannot be created inside a @Function def':
                raise
            else:
                raise TypeError(
                    'Parameter cannot be created inside Recurrence Python step function.'
                )

    return f
Code example #3
0
File: __init__.py — Project: eric-seekas/CNTK
def universal(update_func, parameters):
    '''
    Creates a learner which uses a CNTK function to update the parameters.

    Args:
        update_func: function that takes parameters and gradients as arguments and
         returns a :class:`~cntk.ops.functions.Function` that performs the
         desired updates. The returned function updates the parameters by
         means of containing :func:`~cntk.ops.assign` operations.
         If ``update_func`` does not contain :func:`~cntk.ops.assign` operations
         the parameters will not be updated.
        parameters (list): list of network parameters to tune.
         These can be obtained by the root operator's `parameters`.

    Returns:
        :class:`~cntk.learners.Learner`: learner instance that can be passed to
        the :class:`~cntk.train.trainer.Trainer`

    Examples:
        >>> def my_adagrad(parameters, gradients):
        ...     accumulators = [C.constant(0, shape=p.shape, dtype=p.dtype, name='accum') for p in parameters]
        ...     update_funcs = []
        ...     for p, g, a in zip(parameters, gradients, accumulators):
        ...         accum_new = C.assign(a, g * g)
        ...         update_funcs.append(C.assign(p, p - 0.01 * g / C.sqrt(accum_new + 1e-6)))
        ...     return C.combine(update_funcs)
        ...
        >>> x = C.input_variable((10,))
        >>> y = C.input_variable((2,))
        >>> z = C.layers.Sequential([C.layers.Dense(100, activation=C.relu), C.layers.Dense(2)])(x)
        >>> loss = C.cross_entropy_with_softmax(z, y)
        >>> learner = C.universal(my_adagrad, z.parameters)
        >>> trainer = C.Trainer(z, loss, learner)
        >>> # now trainer can be used as any other Trainer

    '''

    from .. import constant

    # The update function must take exactly (parameters, gradients).
    arg_names, _ = utils.get_python_function_arguments(update_func)
    if len(arg_names) != 2:
        raise ValueError('update_func must be a function that accepts two arguments (parameters, gradients)')

    # Build one zero-valued constant per parameter to act as its gradient
    # placeholder; every shape must be fully known at this point.
    gradients = []
    for param in parameters:
        if any(d < 0 for d in param.shape):
            raise ValueError('parameter %s has inferred dimensions. Please create the learner after all parameter shapes have been determined' % str(param))
        gradients.append(constant(0, shape=param.shape, dtype=param.dtype, name='grad'))

    # The user function wires assign ops from gradients onto parameters.
    result = update_func(parameters, gradients)

    return cntk_py.universal_learner(parameters, gradients, result)
Code example #4
0
 def add_annotations(f):
     """Implant the decorator's type annotations into *f*.

     ``args`` and ``kwargs`` are the positional/keyword annotations captured
     by the enclosing decorator's scope.  Raises ValueError if *f* already
     carries annotations, and TypeError if the number of supplied annotations
     does not match *f*'s parameter count.  Returns *f* with
     ``__annotations__`` populated.
     """
     # prepare the signature
     param_names, annotations = get_python_function_arguments(f)
     if annotations:
         raise ValueError('@Signature cannot be applied to functions that already have annotations')
     # (removed a dead `annotations = {}` assignment: the value was never read)
     if len(args) + len(kwargs) != len(param_names):
         raise TypeError("{} annotations provided for function to be decorated, but function has {} parameters".format(len(args) + len(kwargs), len(param_names)))
     # implant annotations into f: map each supplied annotation onto the
     # corresponding parameter name
     params_dict = { name: name for name in param_names }
     f.__annotations__ = map_function_arguments(param_names, params_dict, *args, **kwargs)
     return f # and return the updated function
Code example #5
0
File: typing.py — Project: AllanYiin/CNTK
 def add_annotations(f):
     """Implant the decorator's type annotations into *f*.

     ``args``/``kwargs`` are the annotations captured by the enclosing
     decorator's scope.  Raises ValueError if *f* is already annotated and
     TypeError on an annotation/parameter count mismatch.
     """
     # prepare the signature
     param_names, annotations = get_python_function_arguments(f)
     if annotations:
         raise ValueError('@Signature cannot be applied to functions that already have annotations')
     # NOTE(review): this assignment is dead — `annotations` is never read below.
     annotations = {}
     if len(args) + len(kwargs) != len(param_names):
         raise TypeError("{} annotations provided for function to be decorated, but function has {} parameters".format(len(args) + len(kwargs), len(param_names)))
     # implant annotations into f
     params_dict = { name: name for name in param_names }
     f.__annotations__ = map_function_arguments(param_names, params_dict, *args, **kwargs)
     return f # and return the updated function
Code example #6
0
File: __init__.py — Project: gaoxuesong/CNTK
def universal(update_func, parameters):
    '''
    Creates a learner which uses a CNTK function to update the parameters.

    Args:
        update_func: function that takes parameters and gradients as arguments and
         returns a :class:`~cntk.ops.functions.Function` that performs the
         desired updates. The returned function updates the parameters by
         means of containing :func:`~cntk.ops.assign` operations.
         If ``update_func`` does not contain :func:`~cntk.ops.assign` operations
         the parameters will not be updated.
        parameters (list): list of network parameters to tune.
         These can be obtained by the root operator's `parameters`.

    Returns:
        :class:`~cntk.learners.Learner`: learner instance that can be passed to
        the :class:`~cntk.train.trainer.Trainer`

    Examples:
        >>> def my_adagrad(parameters, gradients):
        ...     accumulators = [C.constant(0, shape=p.shape, dtype=p.dtype, name='accum') for p in parameters]
        ...     update_funcs = []
        ...     for p, g, a in zip(parameters, gradients, accumulators):
        ...         accum_new = C.assign(a, g * g)
        ...         update_funcs.append(C.assign(p, p - 0.01 * g / C.sqrt(accum_new + 1e-6)))
        ...     return C.combine(update_funcs)
        ...
        >>> x = C.input_variable((10,))
        >>> y = C.input_variable((2,))
        >>> z = C.layers.Sequential([C.layers.Dense(100, activation=C.relu), C.layers.Dense(2)])(x)
        >>> loss = C.cross_entropy_with_softmax(z, y)
        >>> learner = C.universal(my_adagrad, z.parameters)
        >>> trainer = C.Trainer(z, loss, learner)
        >>> # now trainer can be used as any other Trainer

    '''

    from .. import constant
    # The update function must take exactly (parameters, gradients).
    args, _ = utils.get_python_function_arguments(update_func)
    if len(args) != 2:
        raise ValueError('update_func must be a function that accepts two arguments (parameters, gradients)')
    # One zero-valued constant per parameter acts as its gradient placeholder;
    # a negative dimension means the shape is still inferred, which is rejected.
    gradients = []
    for p in parameters:
        if any(dim<0 for dim in p.shape):
            raise ValueError('parameter %s has inferred dimensions. Please create the learner after all parameter shapes have been determined'%str(p))
        gradients.append(constant(0, shape=p.shape, dtype=p.dtype, name='grad'))

    # The user function wires assign ops from gradients onto parameters.
    result = update_func(parameters, gradients)

    return cntk_py.universal_learner(parameters, gradients, result)