Example #1
def clone(output,
          replace=None,
          strict=True,
          share_inputs=True,
          copy_inputs=DEPRECATED_ARG):
    """
    Function that allows replacing subgraphs of a computational graph.

    It returns a copy of the initial subgraph with the corresponding
    substitutions.

    Parameters
    ----------
    output : Theano Variables (or Theano expressions)
        Theano expression that represents the computational graph.
    replace : dict
        Dictionary describing which subgraphs should be replaced by what.
    share_inputs : bool
        If True, use the same inputs (and shared variables) as the original
        graph. If False, clone them. Note that cloned shared variables still
        use the same underlying storage, so they will always have the same
        value.
    copy_inputs
        Deprecated, use share_inputs.

    """
    if copy_inputs is not DEPRECATED_ARG:
        warnings.warn('In `clone()`, the argument `copy_inputs` has been '
                      'deprecated and renamed to `share_inputs`')
        assert share_inputs  # since `copy_inputs` was used, `share_inputs` should still have its default value
        share_inputs = copy_inputs

    if isinstance(replace, dict):
        items = list(replace.items())
    elif isinstance(replace, (list, tuple)):
        items = replace
    elif replace is None:
        items = []
    else:
        raise ValueError(("replace is neither a dictionary, list, "
                          "tuple or None ! The value provided is %s,"
                          "of type %s")%(str(replace), str(type(replace))))
    tmp_replace = [(x, x.type()) for x, y in items]
    new_replace = [(x, y) for ((_, x), (_, y)) in zip(tmp_replace, items)]
    _, _outs, _ = rebuild_collect_shared(output,
                                         [],
                                         tmp_replace,
                                         [],
                                         strict,
                                         share_inputs)

    # TODO Explain why we call it twice ?!
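    # Likely reason (not stated in the original code): the first call swaps each
    # replaced variable `x` for a fresh placeholder of the same type, and the
    # second call swaps those placeholders for the new values `y`; doing the
    # substitution in two passes keeps the new values from being rewritten again
    # when they themselves contain variables that are being replaced.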
    _, outs, _ = rebuild_collect_shared(_outs,
                                        [],
                                        new_replace,
                                        [],
                                        strict,
                                        share_inputs)

    return outs
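A minimal usage sketch of the `clone` above (hypothetical variable names, assuming the standard `theano.clone` entry point wraps this function): replace `x` by `2 * x` inside the graph of `y = x ** 2` without touching the original graph.

import theano
import theano.tensor as tt

x = tt.scalar('x')
y = x ** 2

# Build a copy of the graph of y in which x has been swapped for 2 * x.
y_doubled_input = theano.clone(y, replace={x: 2 * x})

f = theano.function([x], y_doubled_input)
print(f(3.0))  # (2 * 3) ** 2 = 36.0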
Example #2
def clone(output,
          replace=None,
          strict=True,
          share_inputs=True,
          copy_inputs=DEPRECATED_ARG):
    """
    Function that allows replacing subgraphs of a computational
    graph. It returns a copy of the initial subgraph with the corresponding
    substitutions.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
                    graph

    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
                    replaced by what

    :type share_inputs: bool
    :param share_inputs: If True, use the same inputs (and shared variables)
        as the original graph. If False, clone them. Note that cloned
        shared variables still use the same underlying storage, so they
        will always have the same value.

    :param copy_inputs: deprecated, use share_inputs.
    """
    if copy_inputs is not DEPRECATED_ARG:
        warnings.warn('In `clone()`, the argument `copy_inputs` has been '
                      'deprecated and renamed to `share_inputs`')
        assert share_inputs  # since `copy_inputs` was used, `share_inputs` should still have its default value
        share_inputs = copy_inputs

    if isinstance(replace, dict):
        items = replace.items()
    elif isinstance(replace, (list, tuple)):
        items = replace
    elif replace is None:
        items = []
    else:
        raise ValueError(("replace is neither a dictionary, list, "
                          "tuple or None ! The value provided is %s,"
                          "of type %s")%(str(replace), str(type(replace))))
    tmp_replace = [(x, x.type()) for x, y in items]
    new_replace = [(x, y) for ((_, x), (_, y)) in zip(tmp_replace, items)]
    _, _outs, _ = rebuild_collect_shared(output,
                                         [],
                                         tmp_replace,
                                         [],
                                         strict,
                                         share_inputs)

    # TODO Explain why we call it twice ?!
    _, outs, _ = rebuild_collect_shared(_outs,
                                        [],
                                        new_replace,
                                        [],
                                        strict,
                                        share_inputs)

    return outs
Example #3
def clone(output,
          replace=None,
          strict=True,
          copy_inputs=True):
    """
    Function that allows replacing subgraphs of a computational
    graph. It returns a copy of the initial subgraph with the corresponding
    substitutions.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
                    graph

    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
                    replaced by what

    :type copy_inputs: bool
    :param copy_inputs: If True, use the same inputs (and shared variables)
        as the original graph. If False, clone them. Note that cloned
        shared variables still use the same underlying storage, so they
        will always have the same value.
    """
    if isinstance(replace, dict):
        items = replace.items()
    elif isinstance(replace, (list, tuple)):
        items = replace
    elif replace is None:
        items = []
    else:
        raise ValueError(("replace is neither a dictionary, list, "
                          "tuple or None ! The value provided is %s,"
                          "of type %s")%(str(replace), str(type(replace))))
    tmp_replace = [(x, x.type()) for x, y in items]
    new_replace = [(x, y) for ((_, x), (_, y)) in zip(tmp_replace, items)]
    _, _outs, _ = rebuild_collect_shared(output,
                                         [],
                                         tmp_replace,
                                         [],
                                         strict,
                                         copy_inputs)

    _, outs, _ = rebuild_collect_shared(_outs,
                                        [],
                                        new_replace,
                                        [],
                                        strict,
                                        copy_inputs)

    return outs
Example #4
def clone(output, replace=None, strict=True, share_inputs=True):
    """
    Function that allows replacing subgraphs of a computational
    graph. It returns a copy of the initial subgraph with the corresponding
    substitutions.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
                    graph

    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
                    replaced by what

    :type share_inputs: bool
    :param share_inputs: If True, use the same inputs (and shared variables)
        as the original graph. If False, clone them. Note that cloned
        shared variables still use the same underlying storage, so they
        will always have the same value.
    """
    inps, outs, other_stuff = rebuild_collect_shared(output,
                                                     [],
                                                     replace,
                                                     [],
                                                     strict,
                                                     share_inputs)
    return outs
Example #5
def clone(output,
          replace=None,
          strict=True,
          copy_inputs=True):
    """
    Function that allows replacing subgraphs of a computational
    graph. It returns a copy of the initial subgraph with the corresponding
    substitutions.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
                    graph

    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
                    replaced by what
    """

    inps, outs, other_stuff = rebuild_collect_shared(output,
                                                     [],
                                                     replace,
                                                     [],
                                                     strict,
                                                     copy_inputs
                                                     )
    return outs
Example #6
def clone(output, replace=None, strict=True, share_inputs=True, copy_inputs=DEPRECATED_ARG):
    """
    Function that allows replacing subgraphs of a computational
    graph. It returns a copy of the initial subgraph with the corresponding
    substitutions.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
                    graph

    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
                    replaced by what

    :type share_inputs: bool
    :param share_inputs: If True, use the same inputs (and shared variables)
        as the original graph. If False, clone them. Note that cloned
        shared variables still use the same underlying storage, so they
        will always have the same value.
    """
    if copy_inputs is not DEPRECATED_ARG:
        warnings.warn('In `clone()`, the argument `copy_inputs` has been '
                      'deprecated and renamed to `share_inputs`')
        assert share_inputs  # since `copy_inputs` was used, `share_inputs` should still have its default value
        share_inputs = copy_inputs

    inps, outs, other_stuff = rebuild_collect_shared(output,
                                                     [],
                                                     replace,
                                                     [],
                                                     strict,
                                                     share_inputs)
    return outs
Example #7
def clone_optimized_graph(f):
    maker_ins = [x for x in f.maker.env.inputs
                 if not isinstance(x, theano.tensor.sharedvar.SharedVariable)]
    inps, outs, _ = rebuild_collect_shared(f.maker.env.outputs,
                                           maker_ins,
                                           copy_inputs_over=False)
    ins = [x for x in inps
           if not isinstance(x, theano.tensor.sharedvar.SharedVariable)]
    return (ins, outs)
Example #8
def clone(output, replace=None, strict=True, copy_inputs=True):
    """
    Function that allows replacing subgraphs of a computational
    graph. It returns a copy of the initial subgraph with the corresponding
    substitutions.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
                    graph

    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
                    replaced by what

    :type copy_inputs: bool
    :param copy_inputs: If True, use the same inputs (and shared variables)
        as the original graph. If False, clone them. Note that cloned
        shared variables still use the same underlying storage, so they
        will always have the same value.
    """
    if isinstance(replace, dict):
        items = replace.items()
    elif isinstance(replace, (list, tuple)):
        items = replace
    elif replace is None:
        items = []
    else:
        raise ValueError(("replace is neither a dictionary, list, "
                          "tuple or None ! The value provided is %s,"
                          "of type %s") % (str(replace), str(type(replace))))
    tmp_replace = [(x, x.type()) for x, y in items]
    new_replace = [(x, y) for ((_, x), (_, y)) in zip(tmp_replace, items)]
    _, _outs, _ = rebuild_collect_shared(output, [], tmp_replace, [], strict,
                                         copy_inputs)

    _, outs, _ = rebuild_collect_shared(_outs, [], new_replace, [], strict,
                                        copy_inputs)

    return outs
Example #9
def clone_optimized_graph(f):
    maker_ins = [
        x for x in f.maker.env.inputs
        if not isinstance(x, theano.tensor.sharedvar.SharedVariable)
    ]
    inps, outs, _ = rebuild_collect_shared(f.maker.env.outputs,
                                           maker_ins,
                                           copy_inputs_over=False)
    ins = [
        x for x in inps
        if not isinstance(x, theano.tensor.sharedvar.SharedVariable)
    ]
    return (ins, outs)
Example #10
def clone(output, replace=None, strict=True, copy_inputs=True):
    """
    Function that allows replacing subgraphs of a computational
    graph. It returns a copy of the initial subgraph with the corresponding
    substitutions.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
                    graph

    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
                    replaced by what
    """

    inps, outs, other_stuff = rebuild_collect_shared(output, [], replace, [],
                                                     strict, copy_inputs)
    return outs
Example #11
def clone(output, replace=None, strict=True, share_inputs=True):
    """
    Function that allows replacing subgraphs of a computational graph.
    
    It returns a copy of the initial subgraph with the corresponding
    substitutions.

    Parameters
    ----------
    output : Theano Variables (or Theano expressions)
        Theano expression that represents the computational graph.
    replace : dict
        Dictionary describing which subgraphs should be replaced by what.
    share_inputs : bool
        If True, use the same inputs (and shared variables) as the original
        graph. If False, clone them. Note that cloned shared variables still
        use the same underlying storage, so they will always have the same
        value.

    """
    inps, outs, other_stuff = rebuild_collect_shared(output, [], replace, [],
                                                     strict, share_inputs)
    return outs
Example #12
def scan(fn,
         sequences=None,
         outputs_info=None,
         non_sequences=None,
         n_steps=None,
         truncate_gradient=-1,
         go_backwards=False,
         mode=None,
         name=None,
         options=None,
         profile=False):
    """
    This function constructs and applies a Scan op to the provided
    arguments.

    :param fn:
        ``fn`` is a function that describes the operations involved in one
        step of ``scan``. ``fn`` should construct variables describing the
        output of one iteration step. It should expect as input theano
        variables representing all the slices of the input sequences
        and previous values of the outputs, as well as all other arguments
        given to scan as ``non_sequences``. The order in which scan passes
        these variables to ``fn`` is the following:

        * all time slices of the first sequence
        * all time slices of the second sequence
        * ...
        * all time slices of the last sequence
        * all past slices of the first output
        * all past slices of the second output
        * ...
        * all past slices of the last output
        * all other arguments (the list given as `non_sequences` to
            scan)

        The order of the sequences is the same as the one in the list
        ``sequences`` given to scan. The order of the outputs is the same
        as the order of ``outputs_info``. For any sequence or output, the
        order of the time slices is the same as the order in which they
        were given as taps. For example, if one writes the following:

        .. code-block:: python

            scan(fn,
                 sequences=[dict(input=Sequence1, taps=[-3, 2, -1]),
                            Sequence2,
                            dict(input=Sequence3, taps=3)],
                 outputs_info=[dict(initial=Output1, taps=[-3, -5]),
                               dict(initial=Output2, taps=None),
                               Output3],
                 non_sequences=[Argument1, Argument2])

        ``fn`` should expect the following arguments in this given order:

        #. ``Sequence1[t-3]``
        #. ``Sequence1[t+2]``
        #. ``Sequence1[t-1]``
        #. ``Sequence2[t]``
        #. ``Sequence3[t+3]``
        #. ``Output1[t-3]``
        #. ``Output1[t-5]``
        #. ``Output3[t-1]``
        #. ``Argument1``
        #. ``Argument2``

        The list of ``non_sequences`` can also contain shared variables
        used in the function, though ``scan`` is able to figure those
        out on its own, so they can be skipped. For clarity of the code,
        however, we recommend providing them to scan. To some extent
        ``scan`` can also figure out other non-sequences (not shared)
        even if they are not passed to scan (but used by ``fn``). A simple
        example of this would be:

        .. code-block:: python

            import theano.tensor as TT
            W   = TT.matrix()
            W_2 = W**2
            def f(x):
                return TT.dot(x,W_2)

        The function is expected to return two things. One is a list of
        outputs ordered in the same order as ``outputs_info``, with the
        difference that there should be only one output variable per
        output initial state (even if no tap value is used). Secondly,
        ``fn`` should return an update dictionary (that tells how to
        update any shared variable after each iteration step). The
        dictionary can optionally be given as a list of tuples. There is
        no constraint on the order of these two lists; ``fn`` can return
        either ``(outputs_list, update_dictionary)`` or
        ``(update_dictionary, outputs_list)`` or just one of the two (in
        case the other is empty).

        To use ``scan`` as a while loop, the user needs to change the
        function ``fn`` such that also a stopping condition is returned.
        To do so, he/she needs to wrap the condition in an ``until`` class.
        The condition should be returned as a third element, for example:

        .. code-block:: python

            ...
            return [y1_t, y2_t], {x:x+1}, theano.scan_module.until(x < 50)

        Note that a number of steps (considered here as the maximum
        number of steps) is still required even though a condition is
        passed (it is used to allocate memory if needed).

    :param sequences:
        ``sequences`` is the list of Theano variables or dictionaries
        describing the sequences ``scan`` has to iterate over. If a
        sequence is given as wrapped in a dictionary, then a set of optional
        information can be provided about the sequence. The dictionary
        should have the following keys:

        * ``input`` (*mandatory*) -- Theano variable representing the
          sequence.

        * ``taps`` -- Temporal taps of the sequence required by ``fn``.
          They are provided as a list of integers, where a value ``k``
          implies that at iteration step ``t`` scan will pass to ``fn``
          the slice ``t+k``. The default value is ``[0]``.

        Any Theano variable in the list ``sequences`` is automatically
        wrapped into a dictionary where ``taps`` is set to ``[0]``


    :param outputs_info:
        ``outputs_info`` is the list of Theano variables or dictionaries
        describing the initial state of the outputs computed
        recurrently. When these initial states are given as dictionaries,
        optional information can be provided about the output corresponding
        to these initial states. The dictionary should have the following
        keys:

        * ``initial`` -- Theano variable that represents the initial
          state of a given output. In case the output is not computed
          recursively (think of a map) and does not require an initial
          state, this field can be skipped. Given that only the previous
          time step of the output is used by ``fn`` the initial state
          should have the same shape as the output. If multiple time
          taps are used, the initial state should have one extra
          dimension that should cover all the possible taps. For example
          if we use ``-5``, ``-2`` and ``-1`` as past taps, at step 0,
          ``fn`` will require (by an abuse of notation) ``output[-5]``,
          ``output[-2]`` and ``output[-1]``. This will be given by
          the initial state, which in this case should have the shape
          (5,)+output.shape. If this variable containing the initial
          state is called ``init_y`` then ``init_y[0]`` *corresponds to*
          ``output[-5]``, ``init_y[1]`` *corresponds to* ``output[-4]``,
          ``init_y[2]`` corresponds to ``output[-3]``, ``init_y[3]``
          corresponds to ``output[-2]``, and ``init_y[4]`` corresponds to
          ``output[-1]``. While this order might seem strange, it comes
          naturally from splitting an array at a given point. Assume that
          we have an array ``x``, and we choose ``k`` to be time step
          ``0``. Then our initial state would be ``x[:k]``, while the
          output will be ``x[k:]``. Looking at this split, elements in
          ``x[:k]`` are ordered exactly like those in ``init_y``.
        * ``taps`` -- Temporal taps of the output that will be passed to
          ``fn``. They are provided as a list of *negative* integers,
          where a value ``k`` implies that at iteration step ``t`` scan
          will pass to ``fn`` the slice ``t+k``.

        ``scan`` will follow this logic if partial information is given:

        * If an output is not wrapped in a dictionary, ``scan`` will wrap
          it in one assuming that you use only the last step of the output
          (i.e. it makes your tap value list equal to [-1]).
        * If you wrap an output in a dictionary and you do not provide any
          taps but you provide an initial state it will assume that you are
          using only a tap value of -1.
        * If you wrap an output in a dictionary but you do not provide any
          initial state, it assumes that you are not using any form of
          taps.
        * If you provide ``None`` instead of a variable or an empty
          dictionary, ``scan`` assumes that you will not use any taps for
          this output (as, for example, in the case of a map).

        If ``outputs_info`` is an empty list or None, ``scan`` assumes
        that no tap is used for any of the outputs. If information is
        provided just for a subset of the outputs, an exception is
        raised (because there is no convention on how scan should map
        the provided information to the outputs of ``fn``).


    :param non_sequences:
        ``non_sequences`` is the list of arguments that are passed to
        ``fn`` at each step. One can opt to exclude variables
        used in ``fn`` from this list as long as they are part of the
        computational graph, though for clarity we encourage not to do so.


    :param n_steps:
        ``n_steps`` is the number of steps to iterate given as an int
        or Theano scalar. If any of the input sequences do not have
        enough elements, scan will raise an error. If the *value is 0* the
        outputs will have *0 rows*. If the value is negative, ``scan``
        will run backwards in time. If the ``go_backwards`` flag is already
        set and also ``n_steps`` is negative, ``scan`` will run forward
        in time. If ``n_steps`` is not provided, ``scan`` will figure
        out the number of steps it should run given its input sequences.


    :param truncate_gradient:
        ``truncate_gradient`` is the number of steps to use in truncated
        BPTT.  If you compute gradients through a scan op, they are
        computed using backpropagation through time. By providing a
        value different from -1, you choose to use truncated BPTT instead
        of classical BPTT, where you go back only ``truncate_gradient``
        steps in time.


    :param go_backwards:
        ``go_backwards`` is a flag indicating if ``scan`` should go
        backwards through the sequences. If you think of each sequence
        as indexed by time, making this flag True would mean that
        ``scan`` goes back in time, namely that for any sequence it
        starts from the end and goes towards 0.


    :param name:
        When profiling ``scan``, it is crucial to provide a name for any
        instance of ``scan``. The profiler will produce an overall
        profile of your code as well as profiles for the computation of
        one step of each instance of ``scan``. The ``name`` of the instance
        appears in those profiles and can greatly help to disambiguate
        information.

    :param mode:
        It is recommended to leave this argument set to None, especially
        when profiling ``scan`` (otherwise the results are not going to
        be accurate). If you prefer the computations of one step of
        ``scan`` to be done differently than the entire function, you
        can use this parameter to describe how the computations in this
        loop are done (see ``theano.function`` for details about
        possible values and their meaning).

    :param profile:
        Flag or string. If true, or a non-empty string, a profile object
        will be created and attached to the inner graph of scan. If
        ``profile`` is True, the profile object will carry the name of the
        scan instance; otherwise it will carry the passed string. The
        profile object collects (and prints) information only when the
        inner graph is run with the new CVM linker; with other linkers
        (and the default modes) this argument has no effect.

    :rtype: tuple
    :return: tuple of the form (outputs, updates); ``outputs`` is either a
             Theano variable or a list of Theano variables representing the
             outputs of ``scan`` (in the same order as in
             ``outputs_info``). ``updates`` is a dictionary subclass
             specifying the update rules for all shared variables used in
             scan. This dictionary should be passed to ``theano.function``
             when you compile your function. The difference from a normal
             dictionary is that keys are validated to be SharedVariables
             and that merging such dictionaries is validated to be
             consistent.
    """
    # Note : see the internal documentation of the scan op for naming
    # conventions and all other details
    if options is None:
        options = {}
    rvals = scan_utils.canonical_arguments(sequences,
                                           outputs_info,
                                           non_sequences,
                                           go_backwards,
                                           n_steps)
    inputs, states_and_outputs_info, parameters, T = rvals
    # If we were provided a known number of steps (before compilation)
    # and that number is 1 or -1, then we can skip the Scan op and just
    # apply the inner function once. To do that we check here the nature
    # of n_steps.
    T_value = None
    if isinstance(n_steps, (float, int)):
        T_value = int(n_steps)
    else:
        try:
            T_value = opt.get_constant_value(n_steps)
        except (TypeError, AttributeError):
            T_value = None

    if T_value in (1, -1):
        return one_step_scan(fn,
                             inputs,
                             states_and_outputs_info,
                             parameters,
                             truncate_gradient)

    # 1. Variable representing the current time step
    t = scalar_shared(numpy.int64(0), name='t')

    # 2. Allocate memory for the states of scan.
    mintaps = []
    lengths = []
    for pos, arg_info in enumerate(states_and_outputs_info):
        if arg_info.get('taps', None) == [-1]:
            mintaps.append(1)
            lengths.append(scalar_shared(numpy.int64(0),
                                         name='l%d' % pos))
            arg_info['initial'] = scan_utils.expand(tensor.unbroadcast(
                    tensor.shape_padleft(arg_info['initial']), 0), T)
        elif arg_info.get('taps', None):
            if numpy.any(numpy.array(arg_info.get('taps', [])) > 0):
                # Make sure there are no requests for future values of an
                # output; we cannot provide such values.
                raise ValueError('Can not use future taps of outputs',
                                 arg_info)
            mintap = abs(numpy.min(arg_info['taps']))
            lengths.append(scalar_shared(numpy.int64(0),
                                         name='l%d' % pos))
            mintaps.append(mintap)
            arg_info['initial'] = scan_utils.expand(
                arg_info['initial'][:mintap], T)
        else:
            mintaps.append(0)
            lengths.append(scalar_shared(numpy.int64(0),
                                         name='l%d' % pos))

    # 3. Generate arguments for the function passed to scan. This
    # function will return the outputs that need to be computed at every
    # time step.
    inputs_slices = [input[t] for input in inputs]
    states_slices = []
    for n, state in enumerate(states_and_outputs_info):
        # Check if it is actually a state and not an output
        if mintaps[n] != 0:
            for k in state['taps']:
                states_slices.append(
                    state['initial'][(t + mintaps[n] + k) % lengths[n]])

    # 4. Construct outputs that are to be computed by the inner
    # function of scan
    args = inputs_slices + states_slices + parameters
    cond, states_and_outputs, updates = \
            scan_utils.get_updates_and_outputs(fn(*args))

    # The user is allowed to provide no information if scan only behaves
    # like a map.
    if (len(states_and_outputs) != len(states_and_outputs_info) and
        len(states_and_outputs_info) == 0):
        mintaps = [0] * len(states_and_outputs)

    # 5. Construct the scan op
    # 5.1 Construct the list of shared variables with updates: those that
    # can be treated as states (i.e. of TensorType) and those that cannot
    # (like random states).

    if cond is not None:
        _cond = [cond]
    else:
        _cond = []
    rvals = rebuild_collect_shared(
        states_and_outputs + _cond,
        updates=updates,
        rebuild_strict=True,
        copy_inputs_over=True,
        no_default_updates=False)

    # extracting the arguments
    input_variables, cloned_outputs, other_rval = rvals
    clone_d, update_d, update_expr, shared_inputs = other_rval
    additional_input_states = []
    additional_output_states = []
    additional_lengths = []
    additional_mintaps = []
    original_numeric_shared_variables = []

    non_numeric_input_states = []
    non_numeric_output_states = []
    original_non_numeric_shared_variables = []
    pos = len(lengths)
    for sv in shared_inputs:
        if sv in update_d:
            if isinstance(sv, (TensorVariable, TensorSharedVariable)):
                # We can treat it as a sit-sot (single input tap,
                # single output tap) state.
                nw_state = scan_utils.expand(
                    tensor.unbroadcast(tensor.shape_padleft(sv), 0), T)
                additional_lengths.append(scalar_shared(numpy.int64(0),
                                                       name='l%d' % pos))
                pos = pos + 1
                additional_mintaps.append(1)
                additional_input_states.append(nw_state)
                additional_output_states.append(
                    scan_utils.clone(tensor.set_subtensor(
                        nw_state[(t + 1) % additional_lengths[-1]],
                        update_d[sv])))
                original_numeric_shared_variables.append(sv)
            else:
                non_numeric_input_states.append(sv)
                non_numeric_output_states.append(update_d[sv])
                original_non_numeric_shared_variables.append(sv)

    # Replace shared variables in the update
    _additional_output_states = []
    replace = {}
    for sv, buf in zip(original_numeric_shared_variables,
                       additional_input_states):
        replace[sv] = buf[t]
    for out in additional_output_states:
        _additional_output_states.append(
            scan_utils.clone(out, replace=replace))
    additional_output_states = _additional_output_states

    # 5.2 Collect inputs/outputs of the inner function
    inputs = []
    outputs = []
    for n, mintap in enumerate(mintaps):
        if mintap != 0:
            input_state = states_and_outputs_info[n]['initial']
            inputs.append(input_state)
            outputs.append(
                tensor.set_subtensor(
                    input_state[(t + mintap) % lengths[n]],
                    states_and_outputs[n]))
        else:
            mem_buffer = scan_utils.allocate_memory(
                T, states_and_outputs_info[n], states_and_outputs[n])
            # Use the freshly allocated buffer as the state the inner graph
            # reads from and writes into at the current time step.
            inputs.append(mem_buffer)
            outputs.append(
                tensor.set_subtensor(mem_buffer[t % lengths[n]],
                                     states_and_outputs[n]))
    inputs.extend(additional_input_states)
    outputs.extend(additional_output_states)
    lengths.extend(additional_lengths)
    mintaps.extend(additional_mintaps)
    inputs.extend(non_numeric_input_states)
    outputs.extend(non_numeric_output_states)
    all_other_inputs = gof.graph.inputs(outputs)
    parameters = [x for x in all_other_inputs
                  if (x not in inputs and x not in lengths and x is not t
                      and isinstance(x, gof.Variable) and
                      not isinstance(x, gof.Constant))]
    inputs.extend(parameters)
    # 5.3 Construct the options dictionary
    options['name'] = name
    options['profile'] = profile
    options['mode'] = mode
    options['inplace'] = False
    options['gpu'] = False
    options['truncate_gradient'] = truncate_gradient
    options['hash_inner_graph'] = 0
    # 5.4 Construct the ScanOp instance
    local_op = scan_op.ScanOp(inputs=inputs,
                              outputs=outputs,
                              lengths=lengths,
                              switches=[],
                              mintaps=mintaps,
                              index=t,
                              options=options,
                              as_repeatUntil=cond)
    # Note that we get here all the outputs followed by the update rules to
    # the shared variables we had in our scan
    # we know that we have (in this given order):
    #   * len(states_and_outputs) real outputs
    #   * len(additional_input_states) updates for numeric shared variable
    #   * len(non_numeric_input_states) updates for non numeric shared
    #   variables
    scan_inputs = [T] + inputs
    scan_outputs_update_rules = scan_utils.to_list(local_op(*scan_inputs))
    # 5.5 Collect outputs and add permutation object
    scan_outputs = []
    for pos in xrange(len(states_and_outputs)):
        out = scan_utils.ScanPermutation(mintaps[pos])(
            scan_outputs_update_rules[pos], t)
        scan_outputs.append(out[mintaps[pos]:])
    # 5.6 Construct updates dictionary
    update_rules = scan_outputs_update_rules[len(states_and_outputs):]
    updates = {}
    for v, u in izip(original_numeric_shared_variables,
                     update_rules[:len(additional_input_states)]):
        updates[v] = u[-1]
    for v, u in izip(original_non_numeric_shared_variables,
                     update_rules[len(additional_input_states):]):
        updates[v] = u
    # Step 5.7 We are done and can return everything back to the user
    return scan_outputs, updates
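To ground the long docstring above, here is how the documented interface is typically driven (a sketch assuming the standard ``theano.scan`` API; the experimental implementation above may differ in internals): compute A**k by applying a one-step function k times.

import theano
import theano.tensor as T

k = T.iscalar('k')
A = T.vector('A')

# One step of the loop: multiply the previous result elementwise by A.
result, updates = theano.scan(fn=lambda prior_result, A: prior_result * A,
                              outputs_info=T.ones_like(A),
                              non_sequences=A,
                              n_steps=k)

# result stacks every intermediate power; only the final one is needed.
power = theano.function(inputs=[A, k], outputs=result[-1], updates=updates)
print(power([1.0, 2.0, 3.0], 3))  # [ 1.  8. 27.]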
Example #13
def scan(fn,
         sequences=None,
         outputs_info=None,
         non_sequences=None,
         n_steps=None,
         truncate_gradient=-1,
         go_backwards=False,
         mode=None,
         name=None,
         options=None,
         profile=False):
    """
    This function constructs and applies a Scan op to the provided
    arguments.

    :param fn:
        ``fn`` is a function that describes the operations involved in one
        step of ``scan``. ``fn`` should construct variables describing the
        output of one iteration step. It should expect as input theano
        variables representing all the slices of the input sequences
        and previous values of the outputs, as well as all other arguments
        given to scan as ``non_sequences``. The order in which scan passes
        these variables to ``fn`` is the following:

        * all time slices of the first sequence
        * all time slices of the second sequence
        * ...
        * all time slices of the last sequence
        * all past slices of the first output
        * all past slices of the second output
        * ...
        * all past slices of the last output
        * all other arguments (the list given as `non_sequences` to
            scan)

        The order of the sequences is the same as the one in the list
        ``sequences`` given to scan. The order of the outputs is the same
        as the order of ``outputs_info``. For any sequence or output, the
        order of the time slices is the same as the order in which they
        were given as taps. For example, if one writes the following:

        .. code-block:: python

            scan(fn,
                 sequences=[dict(input=Sequence1, taps=[-3, 2, -1]),
                            Sequence2,
                            dict(input=Sequence3, taps=3)],
                 outputs_info=[dict(initial=Output1, taps=[-3, -5]),
                               dict(initial=Output2, taps=None),
                               Output3],
                 non_sequences=[Argument1, Argument2])

        ``fn`` should expect the following arguments in this given order:

        #. ``Sequence1[t-3]``
        #. ``Sequence1[t+2]``
        #. ``Sequence1[t-1]``
        #. ``Sequence2[t]``
        #. ``Sequence3[t+3]``
        #. ``Output1[t-3]``
        #. ``Output1[t-5]``
        #. ``Output3[t-1]``
        #. ``Argument1``
        #. ``Argument2``

        The list of ``non_sequences`` can also contain shared variables
        used in the function, though ``scan`` is able to figure those
        out on its own, so they can be skipped. For clarity of the code,
        however, we recommend providing them to scan. To some extent
        ``scan`` can also figure out other non-sequences (not shared)
        even if they are not passed to scan (but used by ``fn``). A simple
        example of this would be:

        .. code-block:: python

            import theano.tensor as TT
            W   = TT.matrix()
            W_2 = W**2
            def f(x):
                return TT.dot(x,W_2)

        The function is expected to return two things. One is a list of
        outputs ordered in the same order as ``outputs_info``, with the
        difference that there should be only one output variable per
        output initial state (even if no tap value is used). Secondly,
        ``fn`` should return an update dictionary (that tells how to
        update any shared variable after each iteration step). The
        dictionary can optionally be given as a list of tuples. There is
        no constraint on the order of these two lists; ``fn`` can return
        either ``(outputs_list, update_dictionary)`` or
        ``(update_dictionary, outputs_list)`` or just one of the two (in
        case the other is empty).

        To use ``scan`` as a while loop, the user needs to change the
        function ``fn`` such that also a stopping condition is returned.
        To do so, he/she needs to wrap the condition in an ``until`` class.
        The condition should be returned as a third element, for example:

        .. code-block:: python

            ...
            return [y1_t, y2_t], {x:x+1}, theano.scan_module.until(x < 50)

        Note that a number of steps (considered here as the maximum
        number of steps) is still required even though a condition is
        passed (it is used to allocate memory if needed).

    :param sequences:
        ``sequences`` is the list of Theano variables or dictionaries
        describing the sequences ``scan`` has to iterate over. If a
        sequence is given as wrapped in a dictionary, then a set of optional
        information can be provided about the sequence. The dictionary
        should have the following keys:

        * ``input`` (*mandatory*) -- Theano variable representing the
          sequence.

        * ``taps`` -- Temporal taps of the sequence required by ``fn``.
          They are provided as a list of integers, where a value ``k``
          implies that at iteration step ``t`` scan will pass to ``fn``
          the slice ``t+k``. The default value is ``[0]``.

        Any Theano variable in the list ``sequences`` is automatically
        wrapped into a dictionary where ``taps`` is set to ``[0]``


    :param outputs_info:
        ``outputs_info`` is the list of Theano variables or dictionaries
        describing the initial state of the outputs computed
        recurrently. When these initial states are given as dictionaries,
        optional information can be provided about the output corresponding
        to these initial states. The dictionary should have the following
        keys:

        * ``initial`` -- Theano variable that represents the initial
          state of a given output. In case the output is not computed
          recursively (think of a map) and does not require an initial
          state, this field can be skipped. Given that only the previous
          time step of the output is used by ``fn`` the initial state
          should have the same shape as the output. If multiple time
          taps are used, the initial state should have one extra
          dimension that should cover all the possible taps. For example
          if we use ``-5``, ``-2`` and ``-1`` as past taps, at step 0,
          ``fn`` will require (by an abuse of notation) ``output[-5]``,
          ``output[-2]`` and ``output[-1]``. This will be given by
          the initial state, which in this case should have the shape
          (5,)+output.shape. If this variable containing the initial
          state is called ``init_y`` then ``init_y[0]`` *corresponds to*
          ``output[-5]``, ``init_y[1]`` *corresponds to* ``output[-4]``,
          ``init_y[2]`` corresponds to ``output[-3]``, ``init_y[3]``
          corresponds to ``output[-2]``, and ``init_y[4]`` corresponds to
          ``output[-1]``. While this order might seem strange, it comes
          naturally from splitting an array at a given point. Assume that
          we have an array ``x``, and we choose ``k`` to be time step
          ``0``. Then our initial state would be ``x[:k]``, while the
          output will be ``x[k:]``. Looking at this split, elements in
          ``x[:k]`` are ordered exactly like those in ``init_y``.
        * ``taps`` -- Temporal taps of the output that will be passed to
          ``fn``. They are provided as a list of *negative* integers,
          where a value ``k`` implies that at iteration step ``t`` scan
          will pass to ``fn`` the slice ``t+k``.

        ``scan`` will follow this logic if partial information is given:

        * If an output is not wrapped in a dictionary, ``scan`` will wrap
          it in one assuming that you use only the last step of the output
          (i.e. it makes your tap value list equal to [-1]).
        * If you wrap an output in a dictionary and you do not provide any
          taps but you provide an initial state it will assume that you are
          using only a tap value of -1.
        * If you wrap an output in a dictionary but you do not provide any
          initial state, it assumes that you are not using any form of
          taps.
        * If you provide ``None`` instead of a variable or an empty
          dictionary, ``scan`` assumes that you will not use any taps for
          this output (as, for example, in the case of a map).

        If ``outputs_info`` is an empty list or None, ``scan`` assumes
        that no tap is used for any of the outputs. If information is
        provided just for a subset of the outputs, an exception is
        raised (because there is no convention on how scan should map
        the provided information to the outputs of ``fn``).


    :param non_sequences:
        ``non_sequences`` is the list of arguments that are passed to
        ``fn`` at each step. One can opt to exclude variables
        used in ``fn`` from this list as long as they are part of the
        computational graph, though for clarity we encourage not to do so.


    :param n_steps:
        ``n_steps`` is the number of steps to iterate given as an int
        or Theano scalar. If any of the input sequences do not have
        enough elements, scan will raise an error. If the *value is 0* the
        outputs will have *0 rows*. If the value is negative, ``scan``
        will run backwards in time. If the ``go_backwards`` flag is already
        set and also ``n_steps`` is negative, ``scan`` will run forward
        in time. If ``n_steps`` is not provided, ``scan`` will figure
        out the number of steps it should run given its input sequences.


    :param truncate_gradient:
        ``truncate_gradient`` is the number of steps to use in truncated
        BPTT.  If you compute gradients through a scan op, they are
        computed using backpropagation through time. By providing a
        value different from -1, you choose to use truncated BPTT instead
        of classical BPTT, where you go back only ``truncate_gradient``
        steps in time.


    :param go_backwards:
        ``go_backwards`` is a flag indicating if ``scan`` should go
        backwards through the sequences. If you think of each sequence
        as indexed by time, making this flag True would mean that
        ``scan`` goes back in time, namely that for any sequence it
        starts from the end and goes towards 0.


    :param name:
        When profiling ``scan``, it is crucial to provide a name for any
        instance of ``scan``. The profiler will produce an overall
        profile of your code as well as profiles for the computation of
        one step of each instance of ``scan``. The ``name`` of the instance
        appears in those profiles and can greatly help to disambiguate
        information.

    :param mode:
        It is recommended to leave this argument set to None, especially
        when profiling ``scan`` (otherwise the results are not going to
        be accurate). If you prefer the computations of one step of
        ``scan`` to be done differently than the entire function, you
        can use this parameter to describe how the computations in this
        loop are done (see ``theano.function`` for details about
        possible values and their meaning).

    :param profile:
        Flag or string. If true, or a non-empty string, a profile object
        will be created and attached to the inner graph of scan. If
        ``profile`` is True, the profile object will carry the name of the
        scan instance; otherwise it will carry the passed string. The
        profile object collects (and prints) information only when the
        inner graph is run with the new CVM linker; with other linkers
        (and the default modes) this argument has no effect.

    :rtype: tuple
    :return: tuple of the form (outputs, updates); ``outputs`` is either a
             Theano variable or a list of Theano variables representing the
             outputs of ``scan`` (in the same order as in
             ``outputs_info``). ``updates`` is a dictionary subclass
             specifying the update rules for all shared variables used in
             scan. This dictionary should be passed to ``theano.function``
             when you compile your function. The difference from a normal
             dictionary is that keys are validated to be SharedVariables
             and that merging such dictionaries is validated to be
             consistent.
    """
    # Note : see the internal documentation of the scan op for naming
    # conventions and all other details
    if options is None:
        options = {}
    rvals = scan_utils.canonical_arguments(sequences,
                                           outputs_info,
                                           non_sequences,
                                           go_backwards,
                                           n_steps)
    inputs, states_and_outputs_info, parameters, T = rvals
    # If we were provided a known number of steps (before compilation)
    # and that number is 1 or -1, then we can skip the Scan op and just
    # apply the inner function once. To do that we check here the nature
    # of n_steps.
    T_value = None
    if isinstance(n_steps, (float, int)):
        T_value = int(n_steps)
    else:
        try:
            T_value = opt.get_scalar_constant_value(n_steps)
        except (TypeError, AttributeError):
            T_value = None

    if T_value in (1, -1):
        return one_step_scan(fn,
                             inputs,
                             states_and_outputs_info,
                             parameters,
                             truncate_gradient)

    # 1. Variable representing the current time step
    t = scalar_shared(numpy.int64(0), name='t')

    # 2. Allocate memory for the states of scan.
    mintaps = []
    lengths = []
    for pos, arg_info in enumerate(states_and_outputs_info):
        if arg_info.get('taps', None) == [-1]:
            mintaps.append(1)
            lengths.append(scalar_shared(numpy.int64(0),
                                         name='l%d' % pos))
            arg_info['initial'] = scan_utils.expand(tensor.unbroadcast(
                    tensor.shape_padleft(arg_info['initial']), 0), T)
        elif arg_info.get('taps', None):
            if numpy.any(numpy.array(arg_info.get('taps', [])) > 0):
                # Make sure there are no requests for future values of an
                # output; we cannot provide such values.
                raise ValueError('Can not use future taps of outputs',
                                 arg_info)
            mintap = abs(numpy.min(arg_info['taps']))
            lengths.append(scalar_shared(numpy.int64(0),
                                         name='l%d' % pos))
            mintaps.append(mintap)
            arg_info['initial'] = scan_utils.expand(
                arg_info['initial'][:mintap], T)
        else:
            mintaps.append(0)
            lengths.append(scalar_shared(numpy.int64(0),
                                         name='l%d' % pos))

    # 3. Generate arguments for the function passed to scan. This
    # function will return the outputs that need to be computed at every
    # time step.
    inputs_slices = [input[t] for input in inputs]
    states_slices = []
    for n, state in enumerate(states_and_outputs_info):
        # Check if it is actually a state and not an output
        if mintaps[n] != 0:
            for k in state['taps']:
                states_slices.append(
                    state['initial'][(t + mintaps[n] + k) % lengths[n]])

    # 4. Construct outputs that are to be computed by the inner
    # function of scan
    args = inputs_slices + states_slices + parameters
    cond, states_and_outputs, updates = \
            scan_utils.get_updates_and_outputs(fn(*args))

    # The user is allowed to provide no information if scan only behaves
    # like a map.
    if (len(states_and_outputs) != len(states_and_outputs_info) and
        len(states_and_outputs_info) == 0):
        mintaps = [0] * len(states_and_outputs)

    # 5. Construct the scan op
    # 5.1 Construct the list of shared variables with updates: those that
    # can be treated as states (i.e. of TensorType) and those that cannot
    # (like random states).

    if cond is not None:
        _cond = [cond]
    else:
        _cond = []
    rvals = rebuild_collect_shared(
        states_and_outputs + _cond,
        updates=updates,
        rebuild_strict=True,
        copy_inputs_over=True,
        no_default_updates=False)

    # extracting the arguments
    input_variables, cloned_outputs, other_rval = rvals
    clone_d, update_d, update_expr, shared_inputs = other_rval
    additional_input_states = []
    additional_output_states = []
    additional_lengths = []
    additional_mintaps = []
    original_numeric_shared_variables = []

    non_numeric_input_states = []
    non_numeric_output_states = []
    original_non_numeric_shared_variables = []
    pos = len(lengths)
    for sv in shared_inputs:
        if sv in update_d:
            if isinstance(sv, (TensorVariable, TensorSharedVariable)):
                # We can treat it as a sit-sot (single input tap,
                # single output tap) state.
                nw_state = scan_utils.expand(
                    tensor.unbroadcast(tensor.shape_padleft(sv), 0), T)
                additional_lengths.append(scalar_shared(numpy.int64(0),
                                                       name='l%d' % pos))
                pos = pos + 1
                additional_mintaps.append(1)
                additional_input_states.append(nw_state)
                additional_output_states.append(
                    scan_utils.clone(tensor.set_subtensor(
                        nw_state[(t + 1) % additional_lengths[-1]],
                        update_d[sv])))
                original_numeric_shared_variables.append(sv)
            else:
                non_numeric_input_states.append(sv)
                non_numeric_output_states.append(update_d[sv])
                original_non_numeric_shared_variables.append(sv)

    # Replace shared variables in the update
    _additional_output_states = []
    replace = {}
    for sv, buf in zip(original_numeric_shared_variables,
                       additional_input_states):
        replace[sv] = buf[t]
    for out in additional_output_states:
        _additional_output_states.append(
            scan_utils.clone(out, replace=replace))
    additional_output_states = _additional_output_states

    # 5.2 Collect inputs/outputs of the inner function
    inputs = []
    outputs = []
    for n, mintap in enumerate(mintaps):
        if mintap != 0:
            input_state = states_and_outputs_info[n]['initial']
            inputs.append(input_state)
            outputs.append(
                tensor.set_subtensor(
                    input_state[(t + mintap) % lengths[n]],
                    states_and_outputs[n]))
        else:
            mem_buffer = scan_utils.allocate_memory(
                T, states_and_outputs_info[n], states_and_outputs[n])
            # Use the freshly allocated buffer as the state the inner graph
            # reads from and writes into at the current time step.
            inputs.append(mem_buffer)
            outputs.append(
                tensor.set_subtensor(mem_buffer[t % lengths[n]],
                                     states_and_outputs[n]))
    inputs.extend(additional_input_states)
    outputs.extend(additional_output_states)
    lengths.extend(additional_lengths)
    mintaps.extend(additional_mintaps)
    inputs.extend(non_numeric_input_states)
    outputs.extend(non_numeric_output_states)
    all_other_inputs = gof.graph.inputs(outputs)
    parameters = [x for x in all_other_inputs
                  if (x not in inputs and x not in lengths and x is not t
                      and isinstance(x, gof.Variable) and
                      not isinstance(x, gof.Constant))]
    inputs.extend(parameters)
    # 5.3 Construct the options dictionary
    options['name'] = name
    options['profile'] = profile
    options['mode'] = mode
    options['inplace'] = False
    options['gpu'] = False
    options['truncate_gradient'] = truncate_gradient
    options['hash_inner_graph'] = 0
    # 5.4 Construct the ScanOp instance
    local_op = scan_op.ScanOp(inputs=inputs,
                              outputs=outputs,
                              lengths=lengths,
                              switches=[],
                              mintaps=mintaps,
                              index=t,
                              options=options,
                              as_repeatUntil=cond)
    # Note that calling the op returns all the outputs followed by the update
    # rules for the shared variables used in the scan.
    # We know that we have (in this given order):
    #   * len(states_and_outputs) real outputs
    #   * len(additional_input_states) updates for numeric shared variables
    #   * len(non_numeric_input_states) updates for non-numeric shared
    #     variables
    scan_inputs = [T] + inputs
    scan_outputs_update_rules = scan_utils.to_list(local_op(*scan_inputs))
    # 5.5 Collect outputs and add permutation object
    scan_outputs = []
    for pos in xrange(len(states_and_outputs)):
        out = scan_utils.ScanPermutation(mintaps[pos])(
            scan_outputs_update_rules[pos], t)
        scan_outputs.append(out[mintaps[pos]:])
    # 5.6 Construct updates dictionary
    update_rules = scan_outputs_update_rules[len(states_and_outputs):]
    updates = {}
    for v, u in izip(original_numeric_shared_variables,
                     update_rules[:len(additional_input_states)]):
        updates[v] = u[-1]
    for v, u in izip(original_non_numeric_shared_variables,
                     update_rules[len(additional_input_states):]):
        updates[v] = u
    # Step 5.7 We are done and can return everything back to the user
    return scan_outputs, updates
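The pair returned at step 5.7 follows the usual Theano convention of
(outputs, updates). As a hedged illustration of how such a pair is consumed
(shown here with the stock `theano.scan`, which the experimental op built
above is assumed to mirror for this purpose):

import numpy
import theano
import theano.tensor as tensor

x = tensor.dvector('x')
counter = theano.shared(numpy.int64(0), name='counter')

def step(x_t, acc):
    # one real output plus an update rule for a shared variable, mirroring
    # the (scan_outputs, updates) split performed in steps 5.5-5.7
    return acc + x_t, {counter: counter + 1}

outputs, updates = theano.scan(step,
                               sequences=x,
                               outputs_info=tensor.as_tensor_variable(
                                   numpy.float64(0.0)))
# the updates dictionary is handed to theano.function so the shared variable
# is updated on every call
f = theano.function([x], outputs[-1], updates=updates)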
Example #14
0
    def __init__(
        self,
        inputs,
        outputs,
        inline=False,
        lop_overrides="default",
        grad_overrides="default",
        rop_overrides="default",
        connection_pattern=None,
        name=None,
        **kwargs,
    ):
        if not isinstance(outputs, list):
            raise TypeError("outputs must be list, got %s" % type(outputs))
        for i in inputs + outputs:
            if not isinstance(i, gof.Variable):
                raise TypeError(
                    "inputs and outputs must be Variable instances", i)
        if "updates" in kwargs or "givens" in kwargs:
            raise TypeError("updates and givens are not allowed here")
        self.is_inline = inline
        # To correctly support shared variables, the inner function should
        # not see them directly; otherwise there is a problem with the
        # gradient.
        self.shared_inputs = [
            var for var in gof.graph.inputs(outputs)
            if isinstance(var, SharedVariable)
        ]
        shared_vars = [var.type() for var in self.shared_inputs]

        new = rebuild_collect_shared(
            outputs,
            inputs=inputs + shared_vars,
            replace=dict(zip(self.shared_inputs, shared_vars)),
            copy_inputs_over=False,
        )
        (
            local_inputs,
            local_outputs,
            [clone_d, update_d, update_expr, shared_inputs],
        ) = new
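        # Since every shared variable was swapped for an explicit input above,
        # the clone must not introduce any updates or leftover shared inputs.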
        assert len(local_inputs) == len(inputs) + len(self.shared_inputs)
        assert len(local_outputs) == len(outputs)
        assert not update_d
        assert not update_expr
        assert not shared_inputs

        self.local_inputs = local_inputs
        self.local_outputs = local_outputs
        self.inputs = inputs
        self.outputs = outputs
        self.kwargs = kwargs
        self.input_types = [inp.type for inp in inputs]
        self.output_types = [out.type for out in outputs]
        if lop_overrides != "default":
            if grad_overrides != "default":
                raise ValueError(
                    "lop_overrides and grad_overrides are mutually exclusive")
            else:
                self.set_lop_overrides(lop_overrides)
                self._lop_type = "lop"
        elif grad_overrides != "default":
            self.set_lop_overrides(grad_overrides)
            self._lop_type = "grad"
        else:
            self.set_lop_overrides("default")
            self._lop_type = "lop"
        self.set_rop_overrides(rop_overrides)

        self._connection_pattern = connection_pattern

        if name is not None:
            assert isinstance(name, str), "name must be None or string object"
        self.name = name
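Assuming the constructor above belongs to Theano's OpFromGraph (the signature
matches, but the class name is not shown in this excerpt), a minimal usage
sketch could look like:

import theano
import theano.tensor as tensor
from theano.compile.builders import OpFromGraph

x, y = tensor.dscalars('x', 'y')
e = x * y + x
# wrap the small graph (x, y) -> e as a reusable Op; the constructor clones
# the graph with rebuild_collect_shared, swapping shared variables for fresh
# inputs as shown above
op = OpFromGraph([x, y], [e])

a, b = tensor.dscalars('a', 'b')
f = theano.function([a, b], op(a, b))
print(f(2.0, 3.0))  # -> 8.0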
Example #15
0
    def __init__(self, updated_vars, givens=None):
        """
        updated_vars: sequence of (dst, expr) pairs
        givens: optional dict of replacements applied while cloning the
            update expressions

        """

        # -- unique_outputs would normally be deep copies of the outputs, to
        #    provide double-buffering: dests and outputs can include some of
        #    the same variables (e.g. when swapping values). The deep copy is
        #    currently disabled, so unique_outputs simply aliases outputs.
        dests, outputs = zip(*updated_vars)
        # unique_outputs = map(deep_copy_op, outputs)
        unique_outputs = outputs

        # -- partial graph clone to use givens
        stuff = rebuild_collect_shared(
            unique_outputs,
            inputs=list(dests) + [],
            replace=givens,
            rebuild_strict=True,
            copy_inputs_over=True)
        _inputs, unique_outputs_w_giv, other_stuff = stuff
        clone_equiv1, _update_d, _update_expr, _shared_inputs = other_stuff

        all_inputs = theano.gof.graph.inputs(unique_outputs_w_giv + _inputs)

        # -- full graph clone to protect original graph
        clone_equiv = {} # -- do not need order here
        theano.gof.graph.clone_get_equiv(
            [],
            unique_outputs_w_giv + _inputs,
            copy_inputs_and_orphans=True,
            memo=clone_equiv)
        # -- redirect through the second clone
        for orig_var in clone_equiv1:
            tmp = clone_equiv1[orig_var]
            if tmp in clone_equiv:
                clone_equiv[orig_var] = clone_equiv[tmp]
        self.cloned_inputs = [clone_equiv[var] for var in all_inputs]
        self.cloned_dests = [clone_equiv[var] for var in dests]
        self.cloned_outputs = [clone_equiv[var] for var in unique_outputs_w_giv]
        fgraph = theano.gof.fg.FunctionGraph(
            self.cloned_inputs,
            self.cloned_outputs)

        # -- load up fgraph with features necessary to maintain correctness:
        for node in fgraph.apply_nodes:
            if getattr(node.op, 'destroy_map', None):
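                # NOTE: `accept_inplace` is not defined in this excerpt; it is
                # presumably a flag available from the enclosing scope.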
                if not accept_inplace:
                    raise TypeError("Graph must not contain inplace operations",
                                    node, node.op)
                else:
                    fgraph.attach_feature(theano.gof.DestroyHandler())
                    break

        # We need to protect all immutable inputs from inplace operations.
        fgraph.attach_feature(
                theano.compile.function_module.Supervisor(invar
                    for invar in self.cloned_inputs
                    if not ((invar in self.cloned_dests) or
                            (hasattr(fgraph, 'destroyers') and
                                fgraph.destroyers(invar)))))

        # If named nodes are replaced, keep the name
        for feature in theano.compile.function_module.std_fgraph.features:
            fgraph.attach_feature(feature())

        fgraph.attach_feature(theano.tensor.opt.ShapeFeature())

        # -- pre-install the shape information from the Hints created by
        #    e.g. SharedStorageWorkspace
        done = {} # -- no order ok
        for node in fgraph.toposort():
            if is_hint_node(node):
                if node.inputs[0] in done:
                    continue
                hints = OrderedDict(node.op.hints)
                if 'shape' in hints:
                    x = node.inputs[0]
                    assert x.ndim == len(hints['shape'])
                    if x in done:
                        assert done[x] == hints['shape']
                    else:
                        var_shape = tuple(
                            map(theano.tensor.as_tensor_variable,
                                hints['shape']))
                        fgraph.shape_feature.shape_of[node.inputs[0]] = var_shape
                        done[x] = hints['shape']

        self.updated_vars = updated_vars
        self.all_inputs = all_inputs
        self.outputs = outputs
        self.unique_outputs = unique_outputs
        self.clone_equiv = clone_equiv
        self.fgraph = fgraph
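For context, the `givens` argument handled by the partial clone above behaves
like the `replace` argument of the public `theano.clone` helper: a subgraph
substitution applied while cloning. A minimal standalone illustration:

import theano
import theano.tensor as tensor

x = tensor.dvector('x')
y = tensor.dvector('y')
z = (x + y) ** 2

# substitute y with 2 * x while cloning, so the cloned graph depends on x only
z_given = theano.clone(z, replace={y: 2 * x})
f = theano.function([x], z_given)
print(f([1.0, 2.0]))  # (x + 2*x)**2 -> [ 9. 36.]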