Example 1
    def __init__(self, inputs, outputs, grad_depth=1, **kwargs):
        if not isinstance(outputs, list):
            raise TypeError('outputs must be list', outputs)
        for i in inputs + outputs:
            if not isinstance(i, gof.Variable):
                raise TypeError(
                        'inputs and outputs must be Variable instances', i)
        if 'updates' in kwargs:
            raise TypeError('updates are not allowed in kwargs')

        # TODO: the graph may have implicit inputs like
        #       SharedVariable instances.
        #       what impact do they have on the validity of this Op?
        self.fn = orig_function(inputs, outputs, **kwargs)
        self.inputs = inputs
        self.outputs = outputs
        self.input_types = [input.type for input in inputs]
        self.output_types = [output.type for output in outputs]

        if grad_depth > 0:
            output_grads = [t() for t in self.output_types]
            gd = G.grad_sources_inputs(zip(self.outputs, output_grads),
                    self.inputs)
            gs = map(gd.get, self.inputs)
            self.grad_ops = []
            for g in gs:
                if g is None:
                    self.grad_ops.append(lambda *args: None)
                else:
                    # It is normal if some inputs are not needed in order
                    # to compute the gradient, so we ignore them.
                    self.grad_ops.append(OpFromGraph(inputs + output_grads,
                                                     [g],
                                                     grad_depth=grad_depth - 1,
                                                     on_unused_input='ignore'))
Example 2
 def make_thunk(self, node, storage_map, compute_map, no_recycling):
     ret = super(OpFromGraph, self).make_thunk(node, storage_map,
                                               compute_map, no_recycling)
     if not hasattr(self, "fn"):
         self.fn = orig_function(self.new_inputs, self.new_outputs,
                                 **self.kwargs)
     return ret
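The hasattr guard above is a lazy-(re)build idiom: the compiled inner function is constructed only when the Op is actually linked, for example after unpickling an instance that did not carry its compiled state. A generic sketch of the same pattern follows; it is plain Python, and the names are illustrative rather than Theano API.

# Generic lazy-(re)build pattern; names are illustrative, not Theano API.
class LazyWrapper(object):
    def __init__(self, builder, *args, **kwargs):
        # Keep only the cheap, picklable description of what to build.
        self._builder, self._args, self._kwargs = builder, args, kwargs

    def get(self):
        # Build on first use (e.g. after unpickling) and cache the result,
        # mirroring the hasattr(self, "fn") guard in make_thunk above.
        if not hasattr(self, "_built"):
            self._built = self._builder(*self._args, **self._kwargs)
        return self._built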
Example 3
    def __init__(self, inputs, outputs, grad_depth=1, **kwargs):
        if not isinstance(outputs, list):
            raise TypeError('outputs must be list', outputs)
        for i in inputs + outputs:
            if not isinstance(i, gof.Variable):
                raise TypeError(
                        'inputs and outputs must be Variable instances', i)
        if 'updates' in kwargs:
            raise TypeError('updates are not allowed in kwargs')

        # TODO: the graph may have implicit inputs like Value and
        #       SharedVariable instances.
        #       what impact do they have on the validity of this Op?
        self.fn = orig_function(inputs, outputs, **kwargs)
        self.inputs = inputs
        self.outputs = outputs
        self.input_types = [input.type for input in inputs]
        self.output_types = [output.type for output in outputs]

        if grad_depth > 0:
            output_grads = [t() for t in self.output_types]
            gd = G.grad_sources_inputs(zip(self.outputs, output_grads),
                    self.inputs)
            gs = map(gd.get, self.inputs)
            self.grad_ops = []
            for g in gs:
                if g is None:
                    self.grad_ops.append(lambda *args: None)
                else:
                    # It is normal if some inputs are not needed in order
                    # to compute the gradient, so we ignore them.
                    self.grad_ops.append(OpFromGraph(inputs + output_grads,
                                                     [g],
                                                     grad_depth=grad_depth - 1,
                                                     on_unused_input='ignore'))
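Because the constructor builds grad_ops, gradients can be taken through the wrapped graph with T.grad. A hedged sketch, assuming as before that the class is importable as theano.OpFromGraph and that its grad method (not shown in these snippets) dispatches to grad_ops:

import theano
import theano.tensor as T

x, y = T.scalars('x', 'y')
op = theano.OpFromGraph([x, y], [x * y + x])

a, b = T.scalars('a', 'b')
z = op(a, b)
ga, gb = T.grad(z, [a, b])            # dz/da = b + 1, dz/db = a
f = theano.function([a, b], [ga, gb])
print(f(2., 3.))                      # -> [4.0, 2.0]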
Example 4
 def make_thunk(self, node, storage_map, compute_map, no_recycling):
     ret = super(OpFromGraph, self).make_thunk(node, storage_map,
                                               compute_map, no_recycling)
     if not hasattr(self, "fn"):
         self.fn = orig_function(self.new_inputs,
                                 self.new_outputs,
                                 **self.kwargs)
     return ret
Example 5
    def __init__(self, inputs, outputs, grad_depth=1, **kwargs):
        if not isinstance(outputs, list):
            raise TypeError('outputs must be list', outputs)
        for i in inputs + outputs:
            if not isinstance(i, gof.Variable):
                raise TypeError(
                        'inputs and outputs must be Variable instances', i)
        if 'updates' in kwargs:
            raise TypeError('updates are not allowed in kwargs')

        # TODO: the graph may have implicit inputs like
        #       SharedVariable instances.
        #       what impact do they have on the validity of this Op?
        self.fn = orig_function(inputs, outputs, **kwargs)
        self.inputs = inputs
        self.outputs = outputs
        self.input_types = [input.type for input in inputs]
        self.output_types = [output.type for output in outputs]

        if grad_depth > 0:
            output_grads = [t() for t in self.output_types]
            # OpFromGraph doesn't implement a connection_pattern, so for now we regard
            # all inputs and outputs as connected. This will compute the right numerical
            # value for the gradients but could fail to raise the disconnected inputs error
            # in some cases.
            gs = G.grad(cost=None, known_grads=dict(zip(self.outputs, output_grads)),
                    wrt=self.inputs, disconnected_inputs='ignore')
            self.grad_ops = []
            for g in gs:
                if g is None:
                    self.grad_ops.append(lambda *args: None)
                else:
                    # It is normal if some inputs are not needed in order
                    # to compute the gradient, so we ignore them.
                    self.grad_ops.append(OpFromGraph(inputs + output_grads,
                                                     [g],
                                                     grad_depth=grad_depth - 1,
                                                     on_unused_input='ignore'))
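Example 5 replaces the older grad_sources_inputs call with theano.grad's known_grads interface: instead of a scalar cost, the caller supplies the gradient flowing into each output. A minimal, OpFromGraph-independent sketch of that interface:

import theano
import theano.tensor as T

x = T.scalar('x')
y = x ** 2
gy = T.scalar('gy')                   # symbolic gradient arriving at y

# cost=None with known_grads back-propagates gy to the requested inputs.
gx, = theano.grad(cost=None, known_grads={y: gy},
                  wrt=[x], disconnected_inputs='ignore')
f = theano.function([x, gy], gx)
print(f(3., 1.))                      # d(x**2)/dx * gy = 6.0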
Example 6
def function(
    inputs,
    outputs=None,
    mode=None,
    updates=None,
    givens=None,
    no_default_updates=False,
    accept_inplace=False,
    name=None,
    rebuild_strict=True,
    allow_input_downcast=None,
    profile=None,
    on_unused_input=None,
):
    """
    Return a callable object that will calculate `outputs` from `inputs`.

    :type inputs: list of either Variable or Param instances.
    :param inputs: function parameters, these are not allowed to be shared
    variables

    :type outputs: list or dict of Variables or Out instances.  If it is a
                   dict, the keys must be strings
    :param outputs: expressions to compute

    :type mode: string or `Mode` instance.
    :param mode: compilation mode

    :type updates: iterable over pairs (shared_variable, new_expression).
                   List, tuple or OrderedDict.
    :param updates: update the values for SharedVariable inputs
                    according to these expressions

    :type givens: iterable over pairs (Var1, Var2) of Variables. List,
                  tuple or dict.  The Var1 and Var2 in each pair must
                  have the same Type.
    :param givens: specific substitutions to make in the computation
                   graph (Var2 replaces Var1).

    :type no_default_updates: either bool or list of Variables
    :param no_default_updates: if True, do not perform any automatic
        update on Variables.  If False (default), perform them
        all. Else, perform automatic updates on all Variables that are
        neither in "updates" nor in "no_default_updates".

    :param name: an optional name for this function. The profile mode
        will print the time spent in this function.

    :param rebuild_strict: True (Default) is the safer and better
        tested setting, in which case `givens` must substitute new
        variables with the same Type as the variables they replace.
        False is a you-better-know-what-you-are-doing setting, that
        permits `givens` to replace variables with new variables of
        any Type.  The consequence of changing a Type is that all
        results depending on that variable may have a different Type
        too (the graph is rebuilt from inputs to outputs).  If one of
        the new types does not make sense for one of the Ops in the
        graph, an Exception will be raised.

    :type allow_input_downcast: Boolean or None
    :param allow_input_downcast: True means that the values passed as
        inputs when calling the function can be silently downcasted to
        fit the dtype of the corresponding Variable, which may lose
        precision.  False means that it will only be cast to a more
        general, or precise, type. None (default) is almost like
        False, but allows downcasting of Python float scalars to
        floatX.

    :type profile: None, True, or ProfileStats instance
    :param profile: accumulate profiling information into a given
        ProfileStats instance. If argument is `True` then a new
        ProfileStats instance will be used.  This profiling object
        will be available via self.profile.

    :param on_unused_input: What to do if a variable in the 'inputs'
        list is not used in the graph. Possible values are 'raise',
        'warn', 'ignore' and None.

    :rtype: Function instance
    :returns: a callable object that will compute the outputs (given
        the inputs) and update the implicit function arguments
        according to the `updates`.

    :note: Regarding givens: Be careful to make sure that these
        substitutions are independent--behaviour when Var1 of one pair
        appears in the graph leading to Var2 in another expression is
        undefined.  Replacements specified with givens are different
        from optimizations in that Var2 is not expected to be
        equivalent to Var1.


    Internal documentation:

        What happens when you call theano.function?
           1. RemoveShared: shared variables are just an abstraction to make
        things more convenient for the user. The shared variables are
        transformed into implicit inputs and implicit outputs. The
        optimizations don't see which variables are shared or not.
           2. FunctionGraph: determines whether a graph is valid. For example,
        suppose
        you merge the two apply nodes in our example above, ie, do the
        addition and the tanh at the same time. If you propose a merge that
        changes the resulting dtype or broadcastable pattern of V4, the fgraph
        will detect this.
                    inplace optimizations: say we have an apply node that
        does + on V1 and V2, with output V3. We can change the output to be
        V1, to use less memory. theano must be told that this optimization is
        happening though, so that other parts of the graph are given the
        correct (pre + or post + ) version of V1.
                  fgraph will raise an error if any of these types of
        modifications causes an error
                  fgraph also adds a field called "clients" to all variables.
        clients is a list of apply nodes that use the variable. this makes it
        possible to traverse the graph in both directions. this is useful for
        determining whether to do some optimizations. for example, a fusion
        operation that removes V3 is not very helpful if V3 is also needed for
        some other apply node. fusion operations result in a composite op that
        takes a minigraph of theano scalars and uses this to do elemwise
        operations on theano tensors
         3. Optimization
               How well do optimizations apply to new ops?
                 Usually there are no optimizations for new ops. In fact, new
        ops can disrupt patterns and break currently working optimizations.
        Since the Print op, for example, is not known by any optimization,
        setting a Print op in the middle of a pattern that is usually
        optimized out will block the optimization. for example, log(1+x)
        optimizes to log1p(x) but log(1+Print(x)) is unaffected by
        optimizations.
                 One exception is elemwise ops. If you implement your new op
        as a scalar op then it will automatically work with all the elemwise
        fusion machinery.

                 Local optimizations try to replace some node in the graph
        with a different node. In the case of log(1+x), we want to replace the
        log node.

                 def opt_log1p(node):
                    if not isinstance(node.op,Elemwise):
                       return
                    if not isinstance(node.op.scalar_op, log):
                       return
                    inp = node.inputs[0]
                    if not inp.owner:
                       return
                    if not isinstance(inp.owner.op, add):
                       return
                    inp2 = inp.owner.inputs
                    check that this has length 2, and that one of the inputs
        is 1. assign the other input to x
                    return log1p(x)


         4. Linker
               The linker uses a Python loop to execute the code associated
               with all the Apply nodes in the graph in the correct order.
               The CVM is a linker that replaces this Python loop with a C
               loop to avoid continuously changing between Python and C.
               The CVM is faster for 2 reasons:
                 1) Its internal logic is in C, so no Python interpreter
                    overhead.
                 2) It makes native calls from the VM logic into thunks that
                    have been compiled using the CLinker.
               The VM is a linker that was developed to prototype the CVM. It
        was easier to develop the VM in Python and then translate it to C than
        to write it in C from scratch.
               CVM stands for C Virtual Machine.
    """
    if isinstance(outputs, dict):
        output_items = list(outputs.items())

        for item_pair in output_items:
            assert isinstance(item_pair[0], string_types)

        output_items_sorted = sorted(output_items)

        output_keys = []
        outputs = []
        for pair in output_items_sorted:
            output_keys.append(pair[0])
            outputs.append(pair[1])

    else:
        output_keys = None

    if name is None:
        # Determine possible file names
        source_file = re.sub("\.pyc?", ".py", __file__)
        compiled_file = source_file + "c"

        stack = tb.extract_stack()
        idx = len(stack) - 1

        last_frame = stack[idx]
        if last_frame[0] == source_file or last_frame[0] == compiled_file:
            func_frame = stack[idx - 1]
            while "theano/gof" in func_frame[0] and idx > 0:
                idx -= 1
                # This can happen if we call var.eval()
                func_frame = stack[idx - 1]
            name = func_frame[0] + ":" + str(func_frame[1])

    if updates is None:
        updates = []

    if isinstance(updates, dict) and not isinstance(updates, compat.OrderedDict) and len(updates) > 1:
        warnings.warn(
            "The parameter 'updates' of theano.function()"
            " expects an OrderedDict,"
            " got " + str(type(updates)) + ". Using "
            "a standard dictionary here results in "
            "non-deterministic behavior. You should use an OrderedDict"
            " if you are using Python 2.7 (theano.compat.OrderedDict"
            " for older python), or use a list of (shared, update)"
            " pairs. Do not just convert your dictionary to this type before"
            " the call as the conversion will still be non-deterministic.",
            stacklevel=2,
        )

    if givens is None:
        givens = []
    if not isinstance(inputs, (list, tuple)):
        raise Exception(
            "Input variables of a Theano function should be "
            "contained in a list, even when there is a single "
            "input."
        )

    # compute some features of the arguments:
    uses_In = any([isinstance(i, In) for i in inputs])
    uses_tuple = any([isinstance(i, (list, tuple)) for i in inputs])
    uses_updates = bool(updates)
    uses_givens = bool(givens)

    # See if we have any mutable / borrow inputs
    check_for_aliased_inputs = False
    for i in inputs:
        if isinstance(i, In) and ((hasattr(i, "borrow") and i.borrow) or (hasattr(i, "mutable") and i.mutable)):
            check_for_aliased_inputs = True

    if uses_In or uses_tuple:
        # we must use old semantics in this case.
        if profile:
            raise NotImplementedError("profiling not supported in old-style " "function")
        if uses_updates or uses_givens:
            raise NotImplementedError(
                "In() instances and tuple inputs trigger the old semantics, "
                "which disallow using updates and givens"
            )
        fn = orig_function(inputs, outputs, mode=mode, accept_inplace=accept_inplace, name=name)
    else:
        # note: pfunc will also call orig_function-- orig_function is
        #      a choke point that all compilation must pass through
        fn = pfunc(
            params=inputs,
            outputs=outputs,
            mode=mode,
            updates=updates,
            givens=givens,
            no_default_updates=no_default_updates,
            accept_inplace=accept_inplace,
            name=name,
            rebuild_strict=rebuild_strict,
            allow_input_downcast=allow_input_downcast,
            on_unused_input=on_unused_input,
            profile=profile,
            output_keys=output_keys,
        )
    # We need to set the check_for_aliased_inputs flag if we have any mutable
    # or borrowed user-defined inputs
    fn._check_for_aliased_inputs = check_for_aliased_inputs
    return fn
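The dict-handling branch at the top of this function is what lets outputs be passed as a dictionary; the compiled function then returns its results keyed by the same strings. A short usage sketch of that behaviour:

import theano
import theano.tensor as T

x = T.dvector('x')
f = theano.function([x], {'double': 2 * x, 'square': x ** 2})
res = f([1., 2.])
print(res['double'], res['square'])   # [2. 4.] [1. 4.]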
Example 7
def function(inputs,
             outputs=None,
             mode=None,
             updates=None,
             givens=None,
             no_default_updates=False,
             accept_inplace=False,
             name=None,
             rebuild_strict=True,
             allow_input_downcast=None,
             profile=None,
             on_unused_input=None):
    """
    Return a callable object that will calculate `outputs` from `inputs`.

    Parameters
    ----------
    inputs : list of either Variable or Param instances.
        Function parameters, these are not allowed to be shared variables.
    outputs : list or dict of Variables or Out instances.
        If it is a dict, the keys must be strings. Expressions to compute.
    mode : string or `Mode` instance.
        Compilation mode.
    updates : iterable over pairs (shared_variable, new_expression). List, tuple
              or OrderedDict.
        Updates the values for SharedVariable inputs according to these
        expressions.
    givens : iterable over pairs (Var1, Var2) of Variables. List, tuple or dict.
             The Var1 and Var2 in each pair must have the same Type.
        Specific substitutions to make in the computation graph (Var2 replaces
        Var1).
    no_default_updates : bool or list of Variables
        If True, do not perform any automatic update on Variables. If False
        (default), perform them all. Else, perform automatic updates on all
        Variables that are neither in "updates" nor in "no_default_updates".
    name : str
        An optional name for this function. The profile mode will print the time
        spent in this function.
    rebuild_strict : bool
        True (Default) is the safer and better tested setting, in which case
        `givens` must substitute new variables with the same Type as the
        variables they replace.
        False is a you-better-know-what-you-are-doing setting, that permits
        `givens` to replace variables with new variables of any Type.
        The consequence of changing a Type is that all results depending on that
        variable may have a different Type too (the graph is rebuilt from inputs
        to outputs). If one of the new types does not make sense for one of the
        Ops in the graph, an Exception will be raised.
    allow_input_downcast : bool or None
        True means that the values passed as inputs when calling the function
        can be silently downcasted to fit the dtype of the corresponding
        Variable, which may lose precision. False means that it will only be
        cast to a more general, or precise, type. None (default) is almost like
        False, but allows downcasting of Python float scalars to floatX.
    profile : None, True, or ProfileStats instance
        Accumulate profiling information into a given ProfileStats instance.
        If argument is `True` then a new ProfileStats instance will be used.
        This profiling object will be available via self.profile.
    on_unused_input : {'raise', 'warn', 'ignore', None}
        What to do if a variable in the 'inputs' list is not used in the graph.

    Returns
    -------
    Function instance
        A callable object that will compute the outputs (given the inputs) and
        update the implicit function arguments according to the `updates`.

    Notes
    -----
    Regarding givens: Be careful to make sure that these
    substitutions are independent--behaviour when Var1 of one pair
    appears in the graph leading to Var2 in another expression is
    undefined.  Replacements specified with givens are different
    from optimizations in that Var2 is not expected to be
    equivalent to Var1.


    Internal documentation:

        What happens when you call theano.function?
           1. RemoveShared: shared variables are just an abstraction to make
        things more convenient for the user. The shared variables are
        transformed into implicit inputs and implicit outputs. The
        optimizations don't see which variables are shared or not.
           2. FunctionGraph: determines whether a graph is valid. For example,
        suppose
        you merge the two apply nodes in our example above, ie, do the
        addition and the tanh at the same time. If you propose a merge that
        changes the resulting dtype or broadcastable pattern of V4, the fgraph
        will detect this.
                    inplace optimizations: say we have an apply node that
        does + on V1 and V2, with output V3. We can change the output to be
        V1, to use less memory. theano must be told that this optimization is
        happening though, so that other parts of the graph are given the
        correct (pre + or post + ) version of V1.
                  fgraph will raise an error if any of these types of
        modifications causes an error
                  fgraph also adds a field called "clients" to all variables.
        clients is a list of apply nodes that use the variable. this makes it
        possible to traverse the graph in both directions. this is useful for
        determining whether to do some optimizations. for example, a fusion
        operation that removes V3 is not very helpful if V3 is also needed for
        some other apply node. fusion operations result in a composite op that
        takes a minigraph of theano scalars and uses this to do elemwise
        operations on theano tensors
         3. Optimization
               How well do optimizations apply to new ops?
                 Usually there are no optimizations for new ops. In fact, new
        ops can disrupt patterns and break currently working optimizations.
        Since the Print op, for example, is not known by any optimization,
        setting a Print op in the middle of a pattern that is usually
        optimized out will block the optimization. for example, log(1+x)
        optimizes to log1p(x) but log(1+Print(x)) is unaffected by
        optimizations.
                 One exception is elemwise ops. If you implement your new op
        as a scalar op then it will automatically work with all the elemwise
        fusion machinery.

                 Local optimizations try to replace some node in the graph
        with a different node. In the case of log(1+x), we want to replace the
        log node.

                 def opt_log1p(node):
                    if not isinstance(node.op,Elemwise):
                       return
                    if not isinstance(node.op.scalar_op, log):
                       return
                    inp = node.inputs[0]
                    if not inp.owner:
                       return
                    if not isinstance(inp.owner.op, add):
                       return
                    inp2 = inp.owner.inputs
                    check that this has length 2, and that one of the inputs
        is 1. assign the other input to x
                    return log1p(x)


         4. Linker
               The linker uses a Python loop to execute the code associated
               with all the Apply nodes in the graph in the correct order.
               The CVM is a linker that replaces this Python loop with a C
               loop to avoid continuously changing between Python and C.
               The CVM is faster for 2 reasons:
                 1) Its internal logic is in C, so no Python interpreter
                    overhead.
                 2) It makes native calls from the VM logic into thunks that
                    have been compiled using the CLinker.
               The VM is a linker that was developed to prototype the CVM. It
        was easier to develop the VM in Python and then translate it to C than
        to write it in C from scratch.
               CVM stands for C Virtual Machine.

    """
    if isinstance(outputs, dict):
        output_items = list(outputs.items())

        for item_pair in output_items:
            assert isinstance(item_pair[0], string_types)

        output_items_sorted = sorted(output_items)

        output_keys = []
        outputs = []
        for pair in output_items_sorted:
            output_keys.append(pair[0])
            outputs.append(pair[1])

    else:
        output_keys = None

    if name is None:
        # Determine possible file names
        source_file = re.sub(r'\.pyc?', '.py', __file__)
        compiled_file = source_file + 'c'

        stack = tb.extract_stack()
        idx = len(stack) - 1

        last_frame = stack[idx]
        if (last_frame[0] == source_file or last_frame[0] == compiled_file):
            func_frame = stack[idx - 1]
            while "theano/gof" in func_frame[0] and idx > 0:
                idx -= 1
                # This can happen if we call var.eval()
                func_frame = stack[idx - 1]
            name = func_frame[0] + ':' + str(func_frame[1])

    if updates is None:
        updates = []

    if (isinstance(updates, dict)
            and not isinstance(updates, compat.OrderedDict)
            and len(updates) > 1):
        warnings.warn(
            "The parameter 'updates' of theano.function()"
            " expects an OrderedDict,"
            " got " + str(type(updates)) + ". Using "
            "a standard dictionary here results in "
            "non-deterministic behavior. You should use an OrderedDict"
            " if you are using Python 2.7 (theano.compat.OrderedDict"
            " for older python), or use a list of (shared, update)"
            " pairs. Do not just convert your dictionary to this type before"
            " the call as the conversion will still be non-deterministic.",
            stacklevel=2)

    if givens is None:
        givens = []
    if not isinstance(inputs, (list, tuple)):
        raise Exception("Input variables of a Theano function should be "
                        "contained in a list, even when there is a single "
                        "input.")

    # compute some features of the arguments:
    uses_In = any([isinstance(i, In) for i in inputs])
    uses_tuple = any([isinstance(i, (list, tuple)) for i in inputs])
    uses_updates = bool(updates)
    uses_givens = bool(givens)

    # See if we have any mutable / borrow inputs
    check_for_aliased_inputs = False
    for i in inputs:
        if (isinstance(i, In) and ((hasattr(i, 'borrow') and i.borrow) or
                                   (hasattr(i, 'mutable') and i.mutable))):
            check_for_aliased_inputs = True

    if uses_In or uses_tuple:
        # we must use old semantics in this case.
        if profile:
            raise NotImplementedError("profiling not supported in old-style "
                                      "function")
        if uses_updates or uses_givens:
            raise NotImplementedError(
                "In() instances and tuple inputs trigger the old "
                "semantics, which disallow using updates and givens")
        fn = orig_function(inputs,
                           outputs,
                           mode=mode,
                           accept_inplace=accept_inplace,
                           name=name)
    else:
        # note: pfunc will also call orig_function -- orig_function is
        #      a choke point that all compilation must pass through
        fn = pfunc(params=inputs,
                   outputs=outputs,
                   mode=mode,
                   updates=updates,
                   givens=givens,
                   no_default_updates=no_default_updates,
                   accept_inplace=accept_inplace,
                   name=name,
                   rebuild_strict=rebuild_strict,
                   allow_input_downcast=allow_input_downcast,
                   on_unused_input=on_unused_input,
                   profile=profile,
                   output_keys=output_keys)
    # We need to set the check_for_aliased_inputs flag if we have any mutable
    # or borrowed user-defined inputs
    fn._check_for_aliased_inputs = check_for_aliased_inputs
    return fn
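For the common path that reaches pfunc (no In instances or tuples among the inputs), the canonical use is a shared-variable accumulator driven by updates. A small sketch of that usage:

import theano
import theano.tensor as T

state = theano.shared(0, name='state')
inc = T.iscalar('inc')

# 'updates' rewrites the shared variable after each call; the function
# returns the value of `state` computed before the update is applied.
accumulate = theano.function([inc], state, updates=[(state, state + inc)])
print(accumulate(1))       # 0
print(accumulate(10))      # 1
print(state.get_value())   # 11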
Example 8
 def prepare_node(self, node, storage_map, compute_map, impl):
     if not hasattr(self, "fn") and impl == 'py':
         self.fn = orig_function(self.local_inputs, self.local_outputs,
                                 **self.kwargs)
         self.fn.trust_input = True
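prepare_node compiles the inner graph lazily (only for the Python implementation) and sets trust_input so the already-validated inputs are not re-checked on every call of the inner function. A standalone sketch of what the trust_input flag does on a compiled function:

import numpy as np
import theano
import theano.tensor as T

x = T.dvector('x')
f = theano.function([x], x * 2)

# trust_input=True skips the per-call filtering/validation of arguments, so
# the caller must already pass values of exactly the expected type and dtype.
f.trust_input = True
print(f(np.asarray([1., 2.])))   # [2. 4.]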
Example 9
 def prepare_node(self, node, storage_map, compute_map, impl):
     if not hasattr(self, "fn") and impl == 'py':
         self.fn = orig_function(self.local_inputs,
                                 self.local_outputs,
                                 **self.kwargs)
         self.fn.trust_input = True
Example 10
def pfunc(
    params,
    outputs=None,
    mode=None,
    updates=None,
    givens=None,
    no_default_updates=False,
    accept_inplace=False,
    name=None,
    rebuild_strict=True,
    allow_input_downcast=None,
    profile=None,
    on_unused_input=None,
    output_keys=None,
):
    """
    Function-constructor for graphs with shared variables.

    Parameters
    ----------
    params : list of either Variable or In instances
        Function parameters, these are not allowed to be shared variables.
    outputs : list of Variables or Out instances
        Expressions to compute.
    mode : string or `theano.compile.Mode` instance
        Compilation mode.
    updates : iterable over pairs (shared_variable, new_expression). List, tuple or dict.
        Update the values for SharedVariable inputs according to these
        expressions
    givens : iterable over pairs (Var1, Var2) of Variables. List, tuple or dict.
        The Var1 and Var2 in each pair must have the same Type. Specific
        substitutions to make in the computation graph (Var2 replaces Var1).
    no_default_updates : either bool or list of Variables
        If True, do not perform any automatic update on Variables.
        If False (default), perform them all. Else, perform automatic updates
        on all Variables that are neither in "updates" nor in
        "no_default_updates".
    accept_inplace : bool
        True iff the graph can contain inplace operations prior to the
        optimization phase (default is False). *Note* this parameter is unsupported,
        and its use is not recommended.
    name : None or string
        Attaches a name to the profiling result of this function.
    allow_input_downcast : bool
        True means that the values passed as inputs when calling the function
        can be silently downcasted to fit the dtype of the corresponding
        Variable, which may lose precision. False means that it will only be cast to a more
        general, or precise, type. None (default) is almost like
        False, but allows downcasting of Python float scalars to
        floatX.
    profile : None, True, str, or ProfileStats instance
        Accumulate profiling information into a given ProfileStats instance.
        None is the default, and means to use the value of config.profile.
        If argument is `True` then a new ProfileStats instance will be used.
        If argument is a string, a new ProfileStats instance will be created
        with that string as its `message` attribute. This profiling object will
        be available via self.profile.
    on_unused_input : {'raise', 'warn', 'ignore', None}
        What to do if a variable in the 'inputs' list is not used in the graph.

    Returns
    -------
    theano.compile.Function
        A callable object that will compute the outputs (given the inputs) and
        update the implicit function arguments according to the `updates`.

    Notes
    -----
    Regarding givens: Be careful to make sure that these substitutions are
    independent--behaviour when Var1 of one pair appears in the graph leading
    to Var2 in another expression is undefined. Replacements specified with
    givens are different from optimizations in that Var2 is not expected to be
    equivalent to Var1.

    """
    #
    # This function works by cloning the graph (except for the
    # inputs), and then shipping it off to compile.function (There it
    # will be cloned again, unnecessarily, because it doesn't know
    # that we already cloned it.)
    #
    # First, it clones the replacements named in the givens argument,
    # and points each Var1 to the clone of Var2.  Then it sets the
    # inputs in the clone dictionary.  After these steps, we are
    # assuming that the clone dictionary contains all the inputs to
    # the computation graph.
    #
    # Then it clones the outputs and the update expressions.  This
    # rebuilds a computation graph from the inputs and the givens.
    #
    if updates is None:
        updates = []
    if givens is None:
        givens = []
    if profile is None:
        profile = config.profile or config.print_global_stats
        # profile -> True or False
        if profile is False:
            profile = None
    if profile is True:
        profile = ProfileStats(message=name)
        # profile -> object
    elif type(profile) == str:
        profile = ProfileStats(message=profile)
    # profile is typically either False or an object at this point.
    # No need to block other objects being passed through though. It might be
    # useful.

    if not isinstance(params, (list, tuple)):
        raise Exception("in pfunc() the first argument must be a list or " "a tuple")

    if not isinstance(no_default_updates, bool) and not isinstance(
        no_default_updates, list
    ):
        raise TypeError("no_default_update should be either a boolean or " "a list")

    if len(updates) > 0 and any(
        isinstance(v, Variable) for v in iter_over_pairs(updates)
    ):
        raise ValueError(
            "The updates parameter must be an OrderedDict/dict or a list of "
            "lists/tuples with 2 elements"
        )

    # transform params into theano.compile.In objects.
    inputs = [
        _pfunc_param_to_in(p, allow_downcast=allow_input_downcast) for p in params
    ]

    # Check if some variable is present more than once in inputs
    in_variables = [input.variable for input in inputs]
    for i, v in enumerate(in_variables):
        if v in in_variables[(i + 1) :]:
            dup_v_i = in_variables.index(v, (i + 1))
            raise UnusedInputError(
                "Variable %s is used twice in inputs to theano.function, "
                "at indices %i and %i.  This would result in values "
                "provided for it being ignored. Please do not duplicate "
                "variables in the inputs list." % (v, i, dup_v_i)
            )

    # Check that we are not using `givens` to replace input variables, because
    # this typically does nothing, contrary to what one may expect.
    in_var_set = set(in_variables)
    try:
        givens_pairs = list(givens.items())
    except AttributeError:
        givens_pairs = givens
    for x, y in givens_pairs:
        if x in in_var_set:
            raise RuntimeError(
                "You are trying to replace variable '%s' through the "
                "`givens` parameter, but this variable is an input to your "
                "function. Replacing inputs is currently forbidden because it "
                "has no effect. One way to modify an input `x` to a function "
                "evaluating f(x) is to define a new input `y` and use "
                "`theano.function([y], f(x), givens={x: g(y)})`. Another "
                "solution consists in using `theano.clone`, e.g. like this: "
                "`theano.function([x], "
                "theano.clone(f(x), replace={x: g(x)}))`." % x
            )

    # Extend the outputs with the updates on input variables so they are also
    # cloned
    additional_outputs = [i.update for i in inputs if i.update]
    if outputs is None:
        out_list = []
    else:
        if isinstance(outputs, (list, tuple)):
            out_list = list(outputs)
        else:
            out_list = [outputs]
    extended_outputs = out_list + additional_outputs

    output_vars = rebuild_collect_shared(
        extended_outputs,
        in_variables,
        replace=givens,
        updates=updates,
        rebuild_strict=rebuild_strict,
        copy_inputs_over=True,
        no_default_updates=no_default_updates,
    )
    # extracting the arguments
    input_variables, cloned_extended_outputs, other_stuff = output_vars
    clone_d, update_d, update_expr, shared_inputs = other_stuff

    # Recover only the clones of the original outputs
    if outputs is None:
        cloned_outputs = []
    else:
        if isinstance(outputs, (list, tuple)):
            cloned_outputs = cloned_extended_outputs[: len(outputs)]
        else:
            cloned_outputs = cloned_extended_outputs[0]

    for i, iv in zip(inputs, input_variables):
        i.variable = iv

        # If needed, replace the input's update by its cloned equivalent
        if i.update:
            i.update = clone_d[i.update]

    for sv in shared_inputs:
        # pass value of None
        # value will be stored in the resulting function's defaults
        # list but since the value of shared variables never needs to
        # be re-fed, it is not needed
        if sv in update_d:
            si = In(
                variable=sv,
                value=sv.container,
                mutable=True,
                borrow=True,
                update=update_d[sv],
                shared=True,
            )
        else:
            si = In(
                variable=sv, value=sv.container, mutable=False, borrow=True, shared=True
            )
        inputs.append(si)

    return orig_function(
        inputs,
        cloned_outputs,
        mode,
        accept_inplace=accept_inplace,
        name=name,
        profile=profile,
        on_unused_input=on_unused_input,
        output_keys=output_keys,
    )
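pfunc is normally reached through theano.function, but it can also be called directly; shared variables referenced in the graph become implicit inputs, exactly as the cloning comments above describe. A hedged sketch (the import path is an assumption):

import numpy as np
import theano
import theano.tensor as T
from theano.compile import pfunc   # import path assumed; theano.function wraps this

w = theano.shared(np.array([1., 2.]), name='w')   # becomes an implicit input
x = T.dvector('x')

f = pfunc([x], T.dot(x, w), updates=[(w, w + x)])
print(f([1., 1.]))         # 3.0
print(w.get_value())       # [2. 3.]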