Code Example #1
File: builders.py Project: HaniAlmousli/Theano
    def __init__(self, inputs, outputs, grad_depth=1, **kwargs):
        if not isinstance(outputs, list):
            raise TypeError('outputs must be list', outputs)
        for i in inputs + outputs:
            if not isinstance(i, gof.Variable):
                raise TypeError('inputs and outputs must be Variable instances', i)
        if 'updates' in kwargs:
            raise TypeError('updates are not allowed in kwargs')
        # TODO: the graph may have implicit inputs like Value and SharedVariable instances.
        #       what impact do they have on the validity of this Op?
        self.fn = orig_function(inputs, outputs, **kwargs)
        self.inputs = inputs
        self.outputs = outputs
        self.input_types = [input.type for input in inputs]
        self.output_types = [output.type for output in outputs]

        if grad_depth > 0:
            output_grads = [t() for t in self.output_types]
            gd = G.grad_sources_inputs(zip(self.outputs, output_grads), self.inputs)
            gs = map(gd.get, self.inputs)
            self.grad_ops = []
            for g in gs:
                if g is None:
                    self.grad_ops.append(lambda *args: None)
                else:
                    self.grad_ops.append(OpFromGraph(inputs + output_grads,
                                                     [g],
                                                     grad_depth=grad_depth - 1))
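
Code Example #1 wraps a compiled sub-graph as a reusable Op, including one level of gradient support via grad_ops. A minimal usage sketch, assuming OpFromGraph is exported at the top level of theano (the variable names here are illustrative, not from the source):

    import theano
    import theano.tensor as T
    from theano import OpFromGraph

    x, y = T.vectors('x', 'y')
    out = x + 2 * y                     # the sub-graph to encapsulate
    op = OpFromGraph([x, y], [out])     # grad_depth=1 builds one level of grad_ops

    a, b = T.vectors('a', 'b')
    f = theano.function([a, b], op(a, b))
    result = f([1., 2.], [3., 4.])      # -> array([ 7., 10.])
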
Code Example #2
    def __init__(self, inputs, outputs, grad_depth=1, **kwargs):
        if not isinstance(outputs, list):
            raise TypeError('outputs must be list', outputs)
        for i in inputs + outputs:
            if not isinstance(i, gof.Variable):
                raise TypeError(
                    'inputs and outputs must be Variable instances', i)
        if 'updates' in kwargs:
            raise TypeError('updates are not allowed in kwargs')
        # TODO: the graph may have implicit inputs like Value and SharedVariable instances.
        #       what impact do they have on the validity of this Op?
        self.fn = orig_function(inputs, outputs, **kwargs)
        self.inputs = inputs
        self.outputs = outputs
        self.input_types = [input.type for input in inputs]
        self.output_types = [output.type for output in outputs]

        if grad_depth > 0:
            output_grads = [t() for t in self.output_types]
            gd = G.grad_sources_inputs(zip(self.outputs, output_grads),
                                       self.inputs)
            gs = map(gd.get, self.inputs)
            self.grad_ops = []
            for g in gs:
                if g is None:
                    self.grad_ops.append(lambda *args: None)
                else:
                    self.grad_ops.append(
                        OpFromGraph(inputs + output_grads, [g],
                                    grad_depth=grad_depth - 1))
Code Example #3
File: function.py Project: aelaguiz/Theano
def function(inputs, outputs=None, mode=None, updates=None, givens=None,
             no_default_updates=False, accept_inplace=False, name=None,
             rebuild_strict=True, allow_input_downcast=None, profile=None,
             on_unused_input=None):
    """
    Return a callable object that will calculate `outputs` from `inputs`.

    :type inputs: list of either Variable or Param instances.
    :param inputs: function parameters, these are not allowed to be shared
    variables

    :type outputs: list of Variables or Out instances
    :param outputs: expressions to compute

    :type mode: string or `Mode` instance.
    :param mode: compilation mode

    :type updates: iterable over pairs (shared_variable, new_expression). List, tuple or OrderedDict.
    :param updates: update the values for SharedVariable inputs according to these expressions

    :type givens: iterable over pairs (Var1, Var2) of Variables. List, tuple or dict.  The Var1
    and Var2 in each pair must have the same Type.

    :param givens: specific substitutions to make in the computation graph (Var2 replaces
    Var1).

    :type no_default_updates: either bool or list of Variables
    :param no_default_updates: if True, do not perform any automatic update on Variables.
    If False (default), perform them all. Else, perform automatic updates on all Variables
    that are neither in "updates" nor in "no_default_updates".

    :param name: an optional name for this function. The profile mode will print the time spent in this function.

    :param rebuild_strict: True (Default) is the safer and better tested setting, in which case
    `givens` must substitute new variables with the same Type as the variables they replace.
    False is a you-better-know-what-you-are-doing setting, that permits `givens` to replace
    variables with new variables of any Type.  The consequence of changing a Type is that all
    results depending on that variable may have a different Type too (the graph is rebuilt from
    inputs to outputs).  If one of the new types does not make sense for one of the Ops in the
    graph, an Exception will be raised.

    :type allow_input_downcast: Boolean or None
    :param allow_input_downcast: True means that the values passed as
    inputs when calling the function can be silently downcasted to fit
    the dtype of the corresponding Variable, which may lose precision.
    False means that it will only be cast to a more general, or
    precise, type. None (default) is almost like False, but allows
    downcasting of Python float scalars to floatX.

    :type profile: None, True, or ProfileStats instance
    :param profile: accumulate profiling information into a given ProfileStats
    instance. If argument is `True` then a new ProfileStats instance will be
    used.  This profiling object will be available via self.profile.

    :param on_unused_input: What to do if a variable in the 'inputs' list is
    not used in the graph. Possible values are 'raise', 'warn', 'ignore' and None.

    :rtype: Function instance
    :returns: a callable object that will compute the outputs (given the inputs)
    and update the implicit function arguments according to the `updates`.

    :note: Regarding givens: Be careful to make sure that these substitutions are
    independent--behaviour when Var1 of one pair appears in the graph leading to Var2 in
    another expression is undefined.  Replacements specified with givens are different from
    optimizations in that Var2 is not expected to be equivalent to Var1.


    Internal documentation:

        What happens when you call theano.function?
           1. RemoveShared: shared variables are just an abstraction to make
        things more convenient for the user. The shared variables are
        transformed into implicit inputs and implicit outputs. The
        optimizations don't see which variables are shared or not.
           2. FunctionGraph: determines whether a graph is valid. For example,
        suppose
        you merge the two apply nodes in our example above, i.e., do the
        addition and the tanh at the same time. If you propose a merge that
        changes the resulting dtype or broadcastable pattern of V4, the fgraph
        will detect this.
                    inplace optimizations: say we have an apply node that
        does + on V1 and V2, with output V3. We can change the output to be
        V1, to use less memory. theano must be told that this optimization is
        happening though, so that other parts of the graph are given the
        correct (pre + or post + ) version of V1.
                  fgraph will raise an error if any of these types of
        modifications causes an error
                  fgraph also adds a field called "clients" to all variables.
        clients is a list of apply nodes that use the variable. this makes it
        possible to traverse the graph in both directions. this is useful for
        determining whether to do some optimizations. for example, a fusion
        operation that removes V3 is not very helpful if V3 is also needed for
        some other apply node. fusion operations result in a composite op that
        takes a minigraph of theano scalars and uses this to do elemwise
        operations on theano tensors
         3. Optimization
               How well do optimizations apply to new ops?
                 Usually there are no optimizations for new ops. In fact, new
        ops can disrupt patterns and break currently working optimizations.
        Since the Print op, for example, is not known by any optimization,
        setting a Print op in the middle of a pattern that is usually
        optimized out will block the optimization. for example, log(1+x)
        optimizes to log1p(x) but log(1+Print(x)) is unaffected by
        optimizations.
                 One exception is elemwise ops. If you implement your new op
        as a scalar op then it will automatically work with all the elemwise
        fusion machinery.

                 Local optimizations try to replace some node in the graph
        with a different node. In the case of log(1+x), we want to replace the
        log node.

                 def opt_log1p(node):
                    if not isinstance(node.op, Elemwise):
                       return
                    if not isinstance(node.op.scalar_op, log):
                       return
                    inp = node.inputs[0]
                    if not inp.owner:
                       return
                    if not isinstance(inp.owner.op, add):
                       return
                    inp2 = inp.owner.inputs
                    # check that this has length 2, and that one of the
                    # inputs is 1; assign the other input to x
                    # (is_one below is a hypothetical helper)
                    if len(inp2) != 2:
                       return
                    if is_one(inp2[0]):
                       x = inp2[1]
                    elif is_one(inp2[1]):
                       x = inp2[0]
                    else:
                       return
                    return [log1p(x)]


         4. Linker
               The linker uses a python loop to execute the code associated
               with all the Apply nodes in the graph in the correct order.
               the cvm is a linker that replaces this python loop with a c
               loop to avoid continually switching between python and c.
               The CVM is faster for 2 reasons:
                 1) Its internal logic is in C, so there is no Python interpreter overhead.
                 2) It makes native calls from the VM logic into thunks that
                 have been compiled using the CLinker.
               the vm is a linker that was developed to prototype the cvm. it
        was easier to develop the vm in python and then translate it to c instead
        of just writing it in c from scratch
               cvm stands for c virtual machine.




    """
    if updates is None:
        updates = []

    if isinstance(updates, dict) and \
            not isinstance(updates, gof.python25.OrderedDict):
        warnings.warn(
            "The parameter 'updates' of theano.function()"
            " expects an OrderedDict,"
            " got " + str(type(updates)) + ". Using "
            "a standard dictionary here results in "
            "non-deterministic behavior. You should use an OrderedDict"
            " if you are using Python 2.7, or use a list of (shared, update)"
            " pairs. Do not just convert your dictionary to this type before"
            " the call as the conversion will still be non-deterministic.")

    if givens is None:
        givens = []
    if not isinstance(inputs, (list, tuple)):
        raise Exception("Input variables of a Theano function should be"
                        " contained in a list, even when there is a single input.")

    # compute some features of the arguments:
    uses_In = any([isinstance(i, In) for i in inputs])  # N.B. the square brackets are necessary
    uses_tuple = any([isinstance(i, (list, tuple)) for i in inputs])  # N.B. the square brackets are necessary
    uses_updates = (updates != [])
    uses_givens = (givens != [])

    # See if we have any mutable / borrow inputs
    check_for_aliased_inputs = False
    for i in inputs:
        if (isinstance(i, In) and ((hasattr(i, 'borrow') and i.borrow) or
                                   (hasattr(i, 'mutable') and i.mutable))):
            check_for_aliased_inputs = True

    if uses_In or uses_tuple:
        # we must use old semantics in this case.
        if profile:
            raise NotImplementedError('profiling not supported in old-style function')
        if uses_updates or uses_givens:
            raise NotImplementedError("In() instances and tuple inputs triggers the old semantics, which disallow using updates and givens")
        fn = orig_function(inputs, outputs,
                           mode=mode,
                           accept_inplace=accept_inplace, name=name)
    else:
        #note: pfunc will also call orig_function-- orig_function is a choke point
        #      that all compilation must pass through
        fn = pfunc(params=inputs,
                outputs=outputs,
                mode=mode,
                updates=updates,
                givens=givens,
                no_default_updates=no_default_updates,
                accept_inplace=accept_inplace, name=name,
                rebuild_strict=rebuild_strict,
                allow_input_downcast=allow_input_downcast,
                on_unused_input=on_unused_input,
                profile=profile)
    # We need to set the check_for_aliased_inputs flag if we have any mutable
    # or borrowed user-defined inputs
    fn._check_for_aliased_inputs = check_for_aliased_inputs
    return fn
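
A minimal usage sketch of the updates and givens parameters documented above, assuming only the standard theano and theano.tensor APIs (the accumulator pattern follows the Theano tutorial):

    import theano
    import theano.tensor as T

    # updates: after each call, state + inc is written back into state's storage
    state = theano.shared(0.0, name='state')    # an implicit input/output
    inc = T.dscalar('inc')
    acc = theano.function([inc], state, updates=[(state, state + inc)])
    acc(1.0)                                    # returns 0.0 (the pre-update value)
    acc(2.0)                                    # returns 1.0; state.get_value() -> 3.0

    # givens: substitute y + 1 for x in the graph; both sides share the same Type
    x, y = T.dscalars('x', 'y')
    f = theano.function([y], x * 10, givens=[(x, y + 1)])
    f(2.0)                                      # -> 30.0
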
Code Example #4
File: module.py Project: NicolasBouchard/Theano
    def build(self, mode, memo, allocate_all=False):
        """Compile a function for this Method.

        :param allocate_all: if True, storage will be
            allocated for all needed Variables even if there is no
            associated storage for them in the memo. If allocate_all is
            False, storage will only be allocated for Variables that are
            reachable from the inputs list.

        :returns: a function that implements this method
        :rtype: `Function` instance

        """
        if self in memo:
            return memo[self]

        self.resolve_all() # resolve all so we don't have to mess with strings
        def get_storage(r, require=False):
            # If require is True, we can only get storage from the memo.
            try:
                return memo[r]
            except KeyError:
                if require:
                    raise AllocationError('There is no storage associated with %s used by %s = %s.'
                                          ' Verify that it is indeed a Member of the'
                                          ' enclosing module or of one of its submodules.' % (r, self.name, self))
                else:
                    return io.In(variable=r,
                            value=gof.Container(r,
                                storage=[getattr(r, 'data', None)],
                                readonly=(isinstance(r, gof.Constant))),
                            mutable=False)

        # Deal with explicit inputs
        inputs = []
        for input in self.inputs:
            if type(input) is io.In:
                inputs.append(input)
            elif isinstance(input, gof.Variable):
                input_in = io.In(
                        variable=input,
                        mutable=False)
                inputs.append(input_in)
            else:
                raise TypeError(input, type(input))

        # Deal with updates to shared storage
        for k, v in self.updates.iteritems():
            assert isinstance(k, gof.Variable)
            if isinstance(k, gof.Constant):
                raise TypeError('Module Constants cannot be updated', k)
            assert isinstance(v, gof.Variable)

            #identify an input for variable k
            input_k = None
            for input in inputs:
                if input.variable == k:
                    input_k = input

            #print 'METHOD UPDATE', k, v, input_k
            if input_k is None:
                # this is an implicit input,
                # use shared storage
                input_k = io.In(
                        variable=k,
                        update=v,
                        value=get_storage(k, not allocate_all).value,
                        mutable=True,
                        implicit=True)
                inputs.append(input_k)
            else:
                raise ValueError(('Variable listed in both inputs and updates.'
                    ' Use inputs to use your own storage, use updates to '
                    'work on module-shared storage'), k)

        # Deal with module inputs that are not updated

        outputs = self.outputs
        _inputs = [x.variable for x in inputs]
        # Grab the variables that are not accessible from either the inputs or the updates.
        if outputs is None:
            outputs_list = []
        else:
            if isinstance(outputs, (list, tuple)):
                outputs_list = list(outputs)
            else:
                outputs_list = [outputs]

        #backport
        #outputs_list = [] if outputs is None else (list(outputs) if isinstance(outputs, (list, tuple)) else [outputs])

        outputs_variable_list = []
        for o in outputs_list:
            if isinstance(o, io.Out):
                outputs_variable_list += [o.variable]
            else:
                outputs_variable_list += [o]

        #backport
        #outputs_variable_list = [o.variable if isinstance(o, io.Out) else o for o in outputs_list]
        for input in gof.graph.inputs(outputs_variable_list
                                      + [x.update for x in inputs if getattr(x, 'update', False)],
                                      blockers=_inputs):
            if input not in _inputs:
                # Add this input to the inputs; we require that storage already exists for them,
                # but otherwise they are immutable.
                if isinstance(input, gof.Value): # and not isinstance(input, gof.Constant):
                    #input might be Value or Constant
                    storage = get_storage(input)

                    assert type(storage) is io.In
                    container = storage.value
                    #the user is allowed to change this value between function calls if it isn't a constant
                    assert container.readonly == (isinstance(input, gof.Constant))
                    #the function is not allowed to change this value
                    assert storage.mutable == False
                else:
                    storage = get_storage(input, not allocate_all)

                # Declare as an implicit input.
                # TODO Note from OD: is this dangerous? (in case this storage
                # is shared, and would sometimes need to be implicit, sometimes
                # not).
                storage.implicit = True

                assert type(storage) is io.In
                inputs.append(storage)

        if self.mode is None:
            effective_mode = mode
        else:
            effective_mode = self.mode

        # We ignore unused inputs, since all the inputs are passed
        rval = F.orig_function(inputs, outputs, effective_mode,
                on_unused_input='ignore')
        memo[self] = rval
        return rval
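
The io.In(update=..., implicit=True) pattern that build uses for module-shared storage is also available directly when compiling a function. A hedged sketch, assuming io.In accepts value and update as in the code above (this routes through the old-style orig_function path):

    import theano
    import theano.tensor as T
    from theano.compile.io import In

    x = T.dscalar('x')
    # storage holding 0.0 is created for x; after each call, x + 1 is
    # written back into that storage
    acc_in = In(x, value=0.0, update=x + 1, mutable=True)
    f = theano.function([acc_in], x * 2)
    f()     # -> 0.0; the stored value is now 1.0, so the next call returns 2.0
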
Code Example #5
def function(inputs,
             outputs=None,
             mode=None,
             updates=[],
             givens=[],
             no_default_updates=False,
             accept_inplace=False,
             name=None,
             rebuild_strict=True,
             allow_input_downcast=None,
             profile=None):
    """
    Return a callable object that will calculate `outputs` from `inputs`.

    :type inputs: list of either Variable or Param instances.
    :param inputs: function parameters, these are not allowed to be shared
    variables

    :type outputs: list of Variables or Out instances
    :param outputs: expressions to compute

    :type mode: string or `Mode` instance.
    :param mode: compilation mode

    :type updates: iterable over pairs (shared_variable, new_expression). List, tuple or dict.
    :param updates: update the values for SharedVariable inputs according to these expressions

    :type givens: iterable over pairs (Var1, Var2) of Variables. List, tuple or dict.  The Var1
    and Var2 in each pair must have the same Type.

    :param givens: specific substitutions to make in the computation graph (Var2 replaces
    Var1).

    :type no_default_updates: either bool or list of Variables
    :param no_default_updates: if True, do not perform any automatic update on Variables.
    If False (default), perform them all. Else, perform automatic updates on all Variables
    that are neither in "updates" nor in "no_default_updates".

    :param name: an optional name for this function. The profile mode will print the time spent in this function.

    :rtype: Function instance
    :returns: a callable object that will compute the outputs (given the inputs)
    and update the implicit function arguments according to the `updates`.

    :param rebuild_strict: True (Default) is the safer and better tested setting, in which case
    `givens` must substitute new variables with the same Type as the variables they replace.
    False is a you-better-know-what-you-are-doing setting, that permits `givens` to replace
    variables with new variables of any Type.  The consequence of changing a Type is that all
    results depending on that variable may have a different Type too (the graph is rebuilt from
    inputs to outputs).  If one of the new types does not make sense for one of the Ops in the
    graph, an Exception will be raised.

    :type allow_input_downcast: Boolean or None
    :param allow_input_downcast: True means that the values passed as
    inputs when calling the function can be silently downcasted to fit
    the dtype of the corresponding Variable, which may lose precision.
    False means that it will only be cast to a more general, or
    precise, type. None (default) is almost like False, but allows
    downcasting of Python float scalars to floatX.

    :type profile: None, True, or ProfileStats instance
    :param profile: accumulate profiling information into a given ProfileStats
    instance. If argument is `True` then a new ProfileStats instance will be
    used.  This profiling object will be available via self.profile.

    :note: Regarding givens: Be careful to make sure that these substitutions are
    independent--behaviour when Var1 of one pair appears in the graph leading to Var2 in
    another expression is undefined.  Replacements specified with givens are different from
    optimizations in that Var2 is not expected to be equivalent to Var1.

    """
    # Tuples are used in some tests, as we accepted them in the past;
    # allow them, since they behave the same as lists for this purpose.
    if not isinstance(inputs, (list, tuple)):
        raise Exception(
            "Inputs variable of a Theano function should be contained in a list, even when there is a single input."
        )

    # compute some features of the arguments:
    uses_In = any([isinstance(i, In)
                   for i in inputs])  # N.B. the square brackets are necessary
    uses_tuple = any([isinstance(i, (list, tuple))
                      for i in inputs])  # N.B. the square brackets are necessary
    uses_updates = (updates != [])
    uses_givens = (givens != [])

    # See if we have any mutable / borrow inputs
    check_for_aliased_inputs = False
    for i in inputs:
        if (isinstance(i, In) and ((hasattr(i, 'borrow') and i.borrow) or
                                   (hasattr(i, 'mutable') and i.mutable))):
            check_for_aliased_inputs = True

    if uses_In or uses_tuple:
        # we must use old semantics in this case.
        if profile:
            raise NotImplementedError(
                'profiling not supported in old-style function')
        if uses_updates or uses_givens:
            raise NotImplementedError(
                "In() instances and tuple inputs triggers the old semantics, which disallow using updates and givens"
            )
        fn = orig_function(inputs,
                           outputs,
                           mode=mode,
                           accept_inplace=accept_inplace,
                           name=name)
    else:
        fn = pfunc(params=inputs,
                   outputs=outputs,
                   mode=mode,
                   updates=updates,
                   givens=givens,
                   no_default_updates=no_default_updates,
                   accept_inplace=accept_inplace,
                   name=name,
                   rebuild_strict=rebuild_strict,
                   allow_input_downcast=allow_input_downcast,
                   profile=profile)
    # We need to set the check_for_aliased_inputs flag if we have any mutable
    # or borrowed user-defined inputs
    fn._check_for_aliased_inputs = check_for_aliased_inputs
    return fn
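
A short illustration of allow_input_downcast as described in the docstring; a hedged sketch assuming an fvector (float32) input:

    import numpy
    import theano
    import theano.tensor as T

    x = T.fvector('x')                       # float32 vector
    f = theano.function([x], 2 * x, allow_input_downcast=True)
    f(numpy.arange(3, dtype='float64'))      # silently downcast to float32
    # With the default (None), non-scalar float64 data would instead raise
    # a TypeError, since the downcast may lose precision.
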
Code Example #6
File: function.py Project: olivierverdier/Theano
def function(inputs, outputs=None, mode=None, updates=[], givens=[],
             no_default_updates=False, accept_inplace=False, name=None,
             rebuild_strict=True, allow_input_downcast=None, profile=None):
    """
    Return a callable object that will calculate `outputs` from `inputs`.

    :type inputs: list of either Variable or Param instances.
    :param inputs: function parameters, these are not allowed to be shared
    variables

    :type outputs: list of Variables or Out instances
    :param outputs: expressions to compute

    :type mode: string or `Mode` instance.
    :param mode: compilation mode

    :type updates: iterable over pairs (shared_variable, new_expression). List, tuple or dict.
    :param updates: update the values for SharedVariable inputs according to these expressions

    :type givens: iterable over pairs (Var1, Var2) of Variables. List, tuple or dict.  The Var1
    and Var2 in each pair must have the same Type.

    :param givens: specific substitutions to make in the computation graph (Var2 replaces
    Var1).

    :type no_default_updates: either bool or list of Variables
    :param no_default_updates: if True, do not perform any automatic update on Variables.
    If False (default), perform them all. Else, perform automatic updates on all Variables
    that are neither in "updates" nor in "no_default_updates".

    :param name: an optional name for this function. The profile mode will print the time spent in this function.

    :rtype: Function instance
    :returns: a callable object that will compute the outputs (given the inputs)
    and update the implicit function arguments according to the `updates`.

    :param rebuild_strict: True (Default) is the safer and better tested setting, in which case
    `givens` must substitute new variables with the same Type as the variables they replace.
    False is a you-better-know-what-you-are-doing setting, that permits `givens` to replace
    variables with new variables of any Type.  The consequence of changing a Type is that all
    results depending on that variable may have a different Type too (the graph is rebuilt from
    inputs to outputs).  If one of the new types does not make sense for one of the Ops in the
    graph, an Exception will be raised.

    :type allow_input_downcast: Boolean or None
    :param allow_input_downcast: True means that the values passed as
    inputs when calling the function can be silently downcasted to fit
    the dtype of the corresponding Variable, which may lose precision.
    False means that it will only be cast to a more general, or
    precise, type. None (default) is almost like False, but allows
    downcasting of Python float scalars to floatX.

    :type profile: None, True, or ProfileStats instance
    :param profile: accumulate profiling information into a given ProfileStats
    instance. If argument is `True` then a new ProfileStats instance will be
    used.  This profiling object will be available via self.profile.

    :note: Regarding givens: Be careful to make sure that these substitutions are
    independent--behaviour when Var1 of one pair appears in the graph leading to Var2 in
    another expression is undefined.  Replacements specified with givens are different from
    optimizations in that Var2 is not expected to be equivalent to Var1.

    """
    # Tuples are used in some tests, as we accepted them in the past;
    # allow them, since they behave the same as lists for this purpose.
    if not isinstance(inputs, (list, tuple)):
        raise Exception("Input variables of a Theano function should be contained in a list, even when there is a single input.")

    # compute some features of the arguments:
    uses_In = any([isinstance(i, In) for i in inputs])  # N.B. the square brackets are necessary
    uses_tuple = any([isinstance(i, (list, tuple)) for i in inputs])  # N.B. the square brackets are necessary
    uses_updates = (updates != [])
    uses_givens = (givens != [])

    # See if we have any mutable / borrow inputs
    check_for_aliased_inputs = False
    for i in inputs:
        if (isinstance(i, In) and ( (hasattr(i,'borrow') and i.borrow) or
                                   (hasattr(i,'mutable') and i.mutable)) ):
            check_for_aliased_inputs = True

    if uses_In or uses_tuple:
        # we must use old semantics in this case.
        if profile:
            raise NotImplementedError('profiling not supported in old-style function')
        if uses_updates or uses_givens:
            raise NotImplementedError("In() instances and tuple inputs triggers the old semantics, which disallow using updates and givens")
        fn = orig_function(inputs, outputs,
                mode=mode,
                accept_inplace=accept_inplace, name=name)
    else:
        fn = pfunc(params=inputs,
                outputs=outputs,
                mode=mode,
                updates=updates,
                givens=givens,
                no_default_updates=no_default_updates,
                accept_inplace=accept_inplace, name=name,
                rebuild_strict=rebuild_strict,
                allow_input_downcast=allow_input_downcast,
                profile=profile)
    # We need to set the check_for_aliased_inputs flag if we have any mutable
    # or borrowed user-defined inputs
    fn._check_for_aliased_inputs = check_for_aliased_inputs
    return fn
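
A hedged sketch of no_default_updates, which matters mostly for variables that carry a default update, such as random number generator states (this follows the RandomStreams example from the Theano tutorial):

    import theano
    from theano.tensor.shared_randomstreams import RandomStreams

    srng = RandomStreams(seed=234)
    rv = srng.uniform((2,))   # sampling implies a default update to the rng state

    f = theano.function([], rv)                           # rng state advances per call
    g = theano.function([], rv, no_default_updates=True)  # rng state left untouched
    # g() returns the same numbers on every call; f() does not.
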
Code Example #7
File: module.py Project: xinfanmeng/Theano
    def build(self, mode, memo, allocate_all=False):
        """Compile a function for this Method.

        :param allocate_all: if True, storage will be
            allocated for all needed Variables even if there is no
            associated storage for them in the memo. If allocate_all is
            False, storage will only be allocated for Variables that are
            reachable from the inputs list.

        :returns: a function that implements this method
        :rtype: `Function` instance

        """
        if self in memo:
            return memo[self]

        self.resolve_all()  # resolve all so we don't have to mess with strings

        def get_storage(r, require=False):
            # If require is True, we can only get storage from the memo.
            try:
                return memo[r]
            except KeyError:
                if require:
                    raise AllocationError(
                        'There is no storage associated with %s used by %s = %s.'
                        ' Verify that it is indeed a Member of the'
                        ' enclosing module or of one of its submodules.' %
                        (r, self.name, self))
                else:
                    return io.In(variable=r,
                                 value=gof.Container(
                                     r,
                                     storage=[getattr(r, 'data', None)],
                                     readonly=(isinstance(r, gof.Constant))),
                                 mutable=False)

        # Deal with explicit inputs
        inputs = []
        for input in self.inputs:
            if type(input) is io.In:
                inputs.append(input)
            elif isinstance(input, gof.Variable):
                input_in = io.In(variable=input, mutable=False)
                inputs.append(input_in)
            else:
                raise TypeError(input, type(input))

        # Deal with updates to shared storage
        for k, v in self.updates.iteritems():
            assert isinstance(k, gof.Variable)
            if isinstance(k, gof.Constant):
                raise TypeError('Module Constants cannot be updated', k)
            assert isinstance(v, gof.Variable)

            #identify an input for variable k
            input_k = None
            for input in inputs:
                if input.variable == k:
                    input_k = input

            #print 'METHOD UPDATE', k, v, input_k
            if input_k is None:
                # this is an implicit input,
                # use shared storage
                input_k = io.In(variable=k,
                                update=v,
                                value=get_storage(k, not allocate_all).value,
                                mutable=True,
                                implicit=True)
                inputs.append(input_k)
            else:
                raise ValueError(
                    ('Variable listed in both inputs and updates.'
                     ' Use inputs to use your own storage, use updates to '
                     'work on module-shared storage'), k)

        # Deal with module inputs that are not updated

        outputs = self.outputs
        _inputs = [x.variable for x in inputs]
        # Grab the variables that are not accessible from either the inputs or the updates.
        if outputs is None:
            outputs_list = []
        else:
            if isinstance(outputs, (list, tuple)):
                outputs_list = list(outputs)
            else:
                outputs_list = [outputs]

        #backport
        #outputs_list = [] if outputs is None else (list(outputs) if isinstance(outputs, (list, tuple)) else [outputs])

        outputs_variable_list = []
        for o in outputs_list:
            if isinstance(o, io.Out):
                outputs_variable_list += [o.variable]
            else:
                outputs_variable_list += [o]

        #backport
        #outputs_variable_list = [o.variable if isinstance(o, io.Out) else o for o in outputs_list]
        for input in gof.graph.inputs(
                outputs_variable_list +
                [x.update for x in inputs if getattr(x, 'update', False)],
                blockers=_inputs):
            if input not in _inputs:
                # Add this input to the inputs; we require that storage already exists for them,
                # but otherwise they are immutable.
                if isinstance(input, gof.Constant):
                    #input might be Constant
                    storage = get_storage(input)

                    assert type(storage) is io.In
                    container = storage.value
                    #the user is allowed to change this value between function calls if it isn't a constant
                    assert container.readonly == (isinstance(
                        input, gof.Constant))
                    #the function is not allowed to change this value
                    assert storage.mutable == False
                else:
                    storage = get_storage(input, not allocate_all)

                # Declare as an implicit input.
                # TODO Note from OD: is this dangerous? (in case this storage
                # is shared, and would sometimes need to be implicit, sometimes
                # not).
                storage.implicit = True

                assert type(storage) is io.In
                inputs.append(storage)

        if self.mode is None:
            effective_mode = mode
        else:
            effective_mode = self.mode

        # We ignore unused inputs, since all the inputs are passed
        rval = F.orig_function(inputs,
                               outputs,
                               effective_mode,
                               on_unused_input='ignore')
        memo[self] = rval
        return rval
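
The build method above passes on_unused_input='ignore' to F.orig_function. The same flag is exposed on theano.function itself (see Code Examples #3 and #8); a minimal hedged sketch:

    import theano
    import theano.tensor as T

    x, unused = T.dscalars('x', 'unused')
    # 'unused' never reaches the output graph; by default compilation raises
    # an UnusedInputError, while 'warn' and 'ignore' relax that check.
    f = theano.function([x, unused], 2 * x, on_unused_input='ignore')
    f(3.0, 0.0)    # -> 6.0
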
Code Example #8
def function(inputs, outputs=None, mode=None, updates=None, givens=None,
             no_default_updates=False, accept_inplace=False, name=None,
             rebuild_strict=True, allow_input_downcast=None, profile=None,
             on_unused_input=None):
    """
    Return a callable object that will calculate `outputs` from `inputs`.

    :type inputs: list of either Variable or Param instances.
    :param inputs: function parameters, these are not allowed to be shared
    variables

    :type outputs: list of Variables or Out instances
    :param outputs: expressions to compute

    :type mode: string or `Mode` instance.
    :param mode: compilation mode

    :type updates: iterable over pairs (shared_variable, new_expression). List, tuple or dict.
    :param updates: update the values for SharedVariable inputs according to these expressions

    :type givens: iterable over pairs (Var1, Var2) of Variables. List, tuple or dict.  The Var1
    and Var2 in each pair must have the same Type.

    :param givens: specific substitutions to make in the computation graph (Var2 replaces
    Var1).

    :type no_default_updates: either bool or list of Variables
    :param no_default_updates: if True, do not perform any automatic update on Variables.
    If False (default), perform them all. Else, perform automatic updates on all Variables
    that are neither in "updates" nor in "no_default_updates".

    :param name: an optional name for this function. The profile mode will print the time spent in this function.

    :param rebuild_strict: True (Default) is the safer and better tested setting, in which case
    `givens` must substitute new variables with the same Type as the variables they replace.
    False is a you-better-know-what-you-are-doing setting, that permits `givens` to replace
    variables with new variables of any Type.  The consequence of changing a Type is that all
    results depending on that variable may have a different Type too (the graph is rebuilt from
    inputs to outputs).  If one of the new types does not make sense for one of the Ops in the
    graph, an Exception will be raised.

    :type allow_input_downcast: Boolean or None
    :param allow_input_downcast: True means that the values passed as
    inputs when calling the function can be silently downcasted to fit
    the dtype of the corresponding Variable, which may lose precision.
    False means that it will only be cast to a more general, or
    precise, type. None (default) is almost like False, but allows
    downcasting of Python float scalars to floatX.

    :type profile: None, True, or ProfileStats instance
    :param profile: accumulate profiling information into a given ProfileStats
    instance. If argument is `True` then a new ProfileStats instance will be
    used.  This profiling object will be available via self.profile.

    :param on_unused_input: What to do if a variable in the 'inputs' list is
    not used in the graph. Possible values are 'raise', 'warn', 'ignore' and None.

    :rtype: Function instance
    :returns: a callable object that will compute the outputs (given the inputs)
    and update the implicit function arguments according to the `updates`.

    :note: Regarding givens: Be careful to make sure that these substitutions are
    independent--behaviour when Var1 of one pair appears in the graph leading to Var2 in
    another expression is undefined.  Replacements specified with givens are different from
    optimizations in that Var2 is not expected to be equivalent to Var1.


    Internal documentation:

        What happens when you call theano.function?
           1. RemoveShared: shared variables are just an abstraction to make
        things more convenient for the user. The shared variables are
        transformed into implicit inputs and implicit outputs. The
        optimizations don't see which variables are shared or not.
           2. FunctionGraph: determines whether a graph is valid. For example,
        suppose
        you merge the two apply nodes in our example above, i.e., do the
        addition and the tanh at the same time. If you propose a merge that
        changes the resulting dtype or broadcastable pattern of V4, the fgraph
        will detect this.
                    inplace optimizations: say we have an apply node that
        does + on V1 and V2, with output V3. We can change the output to be
        V1, to use less memory. theano must be told that this optimization is
        happening though, so that other parts of the graph are given the
        correct (pre + or post + ) version of V1.
                  fgraph will raise an error if any of these types of
        modifications causes an error
                  fgraph also adds a field called "clients" to all variables.
        clients is a list of apply nodes that use the variable. this makes it
        possible to traverse the graph in both directions. this is useful for
        determining whether to do some optimizations. for example, a fusion
        operation that removes V3 is not very helpful if V3 is also needed for
        some other apply node. fusion operations result in a composite op that
        takes a minigraph of theano scalars and uses this to do elemwise
        operations on theano tensors
         3. Optimization
               How well do optimizations apply to new ops?
                 Usually there are no optimizations for new ops. In fact, new
        ops can disrupt patterns and break currently working optimizations.
        Since the Print op, for example, is not known by any optimization,
        setting a Print op in the middle of a pattern that is usually
        optimized out will block the optimization. for example, log(1+x)
        optimizes to log1p(x) but log(1+Print(x)) is unaffected by
        optimizations.
                 One exception is elemwise ops. If you implement your new op
        as a scalar op then it will automatically work with all the elemwise
        fusion machinery.

                 Local optimizations try to replace some node in the graph
        with a different node. In the case of log(1+x), we want to replace the
        log node.

                 def opt_log1p(node):
                    if not isinstance(node.op, Elemwise):
                       return
                    if not isinstance(node.op.scalar_op, log):
                       return
                    inp = node.inputs[0]
                    if not inp.owner:
                       return
                    if not isinstance(inp.owner.op, add):
                       return
                    inp2 = inp.owner.inputs
                    # check that this has length 2, and that one of the
                    # inputs is 1; assign the other input to x
                    # (is_one below is a hypothetical helper)
                    if len(inp2) != 2:
                       return
                    if is_one(inp2[0]):
                       x = inp2[1]
                    elif is_one(inp2[1]):
                       x = inp2[0]
                    else:
                       return
                    return [log1p(x)]


         4. Linker
               The linker uses a python loop to execute the code associated
               with all the Apply nodes in the graph in the correct order.
               the cvm is a linker that replaces this python loop with a c
               loop to avoid continually switching between python and c.
               The CVM is faster for 2 reasons:
                 1) Its internal logic is in C, so there is no Python interpreter overhead.
                 2) It makes native calls from the VM logic into thunks that
                 have been compiled using the CLinker.
               the vm is a linker that was developed to prototype the cvm. it
        was easier to develop the vm in python and then translate it to c instead
        of just writing it in c from scratch
               cvm stands for c virtual machine.




    """
    # Tuples are used in some tests, as we accepted them in the past;
    # allow them, since they behave the same as lists for this purpose.
    if updates is None:
        updates = []
    if givens is None:
        givens = []
    if not isinstance(inputs, (list, tuple)):
        raise Exception("Inputs variable of a Theano function should be contained in a list, even when there is a single input.")

    # compute some features of the arguments:
    uses_In = any([isinstance(i, In) for i in inputs])  # N.B. the square brackets are necessary
    uses_tuple = any([isinstance(i, (list, tuple)) for i in inputs])  # N.B. the square brackets are necessary
    uses_updates = (updates != [])
    uses_givens = (givens != [])

    # See if we have any mutable / borrow inputs
    check_for_aliased_inputs = False
    for i in inputs:
        if (isinstance(i, In) and ((hasattr(i, 'borrow') and i.borrow) or
                                   (hasattr(i, 'mutable') and i.mutable))):
            check_for_aliased_inputs = True

    if uses_In or uses_tuple:
        # we must use old semantics in this case.
        if profile:
            raise NotImplementedError('profiling not supported in old-style function')
        if uses_updates or uses_givens:
            raise NotImplementedError("In() instances and tuple inputs triggers the old semantics, which disallow using updates and givens")
        fn = orig_function(inputs, outputs,
                           mode=mode,
                           accept_inplace=accept_inplace, name=name)
    else:
        #note: pfunc will also call orig_function-- orig_function is a choke point
        #      that all compilation must pass through
        fn = pfunc(params=inputs,
                outputs=outputs,
                mode=mode,
                updates=updates,
                givens=givens,
                no_default_updates=no_default_updates,
                accept_inplace=accept_inplace, name=name,
                rebuild_strict=rebuild_strict,
                allow_input_downcast=allow_input_downcast,
                on_unused_input=on_unused_input,
                profile=profile)
    # We need to set the check_for_aliased_inputs flag if we have any mutable
    # or borrowed user-defined inputs
    fn._check_for_aliased_inputs = check_for_aliased_inputs
    return fn
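
Finally, a hedged sketch of the profile parameter: per the docstring, passing True creates a fresh ProfileStats instance that ends up reachable as f.profile:

    import theano
    import theano.tensor as T

    x = T.dvector('x')
    f = theano.function([x], T.exp(x).sum(), profile=True)
    f([0.0, 1.0, 2.0])
    f([3.0, 4.0, 5.0])
    # per-Op timing accumulates across calls in the ProfileStats object f.profile
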