Example No. 1
    sit_sot_inner_outputs = []
    sit_sot_return_steps = {}
    sit_sot_rightOrder = []

    # go through outputs picking up time slices as needed
    for i, init_out in enumerate(outs_info):
        # Note that our convention dictates that if an output uses
        # just the previous time step, then as an initial state we will
        # only provide a tensor of the same dimension as one time step;
        # this makes the code much cleaner for those who do not use taps.
        # Otherwise they would always have to shape_padleft the initial
        # state, which is ugly.
        if init_out.get('taps', None) == [-1]:

            actual_arg = init_out['initial']
            arg = safe_new(init_out['initial'])
            if isinstance(arg, tensor.Constant):
                # safe_new returns a clone of the constant, but that is
                # not what we need for initial states
                arg = arg.type()

            # Try to transfer test_value to the new variable
            if config.compute_test_value != 'off':
                try:
                    arg.tag.test_value = gof.Op._get_test_value(actual_arg)
                except AttributeError as e:
                    if config.compute_test_value != 'ignore':
                        # No need to print a warning or raise an error now;
                        # it will be done when fn is called.
                        _logger.info(('Cannot compute test value for the '
                                      'inner function of scan, input value '
                                      'missing %s'), e)
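Usage note: the taps=[-1] convention described in the comments above means the caller's initial state is shaped like a single time step. A minimal sketch of what that looks like from the user side (variable names are illustrative, not from the snippet):

    import theano
    import theano.tensor as tensor

    # The initial state has the shape of ONE step; scan adds the time
    # dimension itself, so the caller never needs shape_padleft.
    x0 = tensor.scalar('x0')
    result, updates = theano.scan(
        fn=lambda prev: prev * 2,  # uses only the previous time step
        outputs_info=[{'initial': x0, 'taps': [-1]}],
        n_steps=5)
    f = theano.function([x0], result)
    print(f(1.0))  # -> [  2.   4.   8.  16.  32.]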
Example No. 2
    def process_node(self, env, node):
        # This flag tells whether anything changed during the last iteration
        changed = True
        clean_inputs, clean_outputs = scan_utils.reconstruct_graph(node.op.inputs, node.op.outputs)

        local_env = gof.Env(clean_inputs, clean_outputs)
        max_iterations = 2 * len(local_env.toposort()) + 3
        counts = 0
        to_remove = []
        to_replace = []
        replace_with_in = []
        replace_with_out = []
        op = node.op
        # Construct the list of non_sequences to simplify a few things
        st = op.n_seqs
        st += int(numpy.sum([len(x) for x in op.tap_array[: (op.n_mit_mot + op.n_mit_sot)]]))
        st += op.n_sit_sot
        st += op.n_shared_outs
        non_seqs = clean_inputs[st:]
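        # The outer node's inputs start with the number-of-steps input,
        # hence the extra +1 in the offset below.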
        st = op.n_seqs + op.n_mit_mot + op.n_mit_sot + op.n_sit_sot + op.n_nit_sot + op.n_shared_outs + 1
        outer_non_seqs = node.inputs[st:]
        assert len(non_seqs) == len(outer_non_seqs)
        while changed and counts < max_iterations:
            counts += 1
            changed = False

            for nd in local_env.toposort():
                if (
                    numpy.all(
                        [(x in non_seqs) or (x.owner in to_remove) or isinstance(x, tensor.Constant) for x in nd.inputs]
                    )
                    and
                    # we can do this because the assumption is that a
                    # ViewOp or DeepCopyOp will appear only at the end of
                    # the function and not somewhere in the middle
                    not isinstance(nd.op, theano.compile.ViewOp)
                    and not isinstance(nd.op, theano.compile.DeepCopyOp)
                    and
                    # and we haven't already looked at this node
                    nd not in to_remove
                ):

                    # We have a candidate node to remove.
                    # Step 1. Reconstruct it on the outside.
                    to_remove.append(nd)
                    outside_ins = []
                    for x in nd.inputs:
                        if x in non_seqs:
                            outside_ins += [outer_non_seqs[non_seqs.index(x)]]
                        elif x in to_replace:
                            outside_ins += [replace_with_out[to_replace.index(x)]]
                        elif isinstance(x, theano.Constant):
                            outside_ins += [x.clone()]
                        else:
                            raise Exception(
                                (
                                    "Error in the `scan_pushout_non_seq_"
                                    "operations`. The optimization tried "
                                    "to move some computation from scan "
                                    "which is not allowed to move. Report "
                                    "this on the theano-users list."
                                ),
                                x,
                            )
                    nw_outer_node = nd.op.make_node(*outside_ins)
                    # Step 2. Create variables for replacements
                    for idx, y in enumerate(nd.outputs):

                        y_place_holder = scan_utils.safe_new(y, "_replace")
                        to_replace += [y]
                        replace_with_in += [y_place_holder]
                        assert type(y) == type(nw_outer_node.outputs[idx])
                        replace_with_out += [nw_outer_node.outputs[idx]]
                    changed = True
        if counts >= max_iterations:
            raise Exception(
                "Error in the `scan_pushout_non_seq_operations`."
                " The optimization exhausted the maximum number "
                "of iterations allowed!"
            )
        # We need to check all candidate replacements and choose those that
        # make sense for us

        # Step 1. Determine which elements of `to_replace` are used by the
        # remaining components of the inner function
        clean_to_replace = []
        clean_replace_with_in = []
        clean_replace_with_out = []
        existent_nodes = [nd for nd in local_env.toposort() if nd not in to_remove]
        to_keep = []
        for nd in existent_nodes:
            to_keep += nd.inputs
        for idx, out in enumerate(to_replace):
            if out in to_keep and out.owner not in existent_nodes:
                clean_to_replace += [out]
                clean_replace_with_in += [replace_with_in[idx]]
                clean_replace_with_out += [replace_with_out[idx]]

        if len(clean_to_replace) > 0:
            # We can finally put an end to all this madness
            givens = {}
            nw_outer = []
            nw_inner = []
            for to_repl, repl_in, repl_out in zip(clean_to_replace, clean_replace_with_in, clean_replace_with_out):
                if isinstance(repl_out, theano.Constant):
                    repl_in = repl_out.clone()
                else:
                    nw_inner += [repl_in]
                    nw_outer += [repl_out]
                givens[to_repl] = repl_in

            _op_outs = scan_utils.clone(clean_outputs, replace=givens)
            _op_ins = clean_inputs + nw_inner
            op_ins, op_outs = scan_utils.reconstruct_graph(_op_ins, _op_outs)
            # Reconstruct node
            nwScan = scan_op.Scan(op_ins, op_outs, op.info)
            nw_node = nwScan.make_node(*(node.inputs + nw_outer))
            env.replace_all_validate(zip(node.outputs, nw_node.outputs), reason="scan_push_computation_out")
            return True
        elif to_keep == []:
            # Nothing in the inner graph should be kept
            replace_with = {}
            for idx, out in enumerate(to_replace):
                if out in local_env.outputs:
                    x = node.outputs[local_env.outputs.index(out)]
                    y = replace_with_out[idx]
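                    # node.inputs[0] is the number of steps; alloc
                    # broadcasts `y` along a new leading time dimension.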
                    shape = [y.shape[i] for i in xrange(y.ndim)]
                    replace_with[x] = tensor.alloc(y, node.inputs[0], *shape)

            # We need to add one extra dimension to the outputs
            env.replace_all_validate(replace_with.items(), reason="scan_push_computation_out")

        else:
            return False
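Roughly, this pass hoists any node whose inputs are all non-sequences or constants out of the scan loop, feeding the hoisted result back in as a fresh non-sequence. A hand-written sketch of the equivalent rewrite (names are illustrative, not part of the pass):

    import theano
    import theano.tensor as tensor

    X = tensor.matrix('X')
    w = tensor.vector('w')

    # Before: `w * 2` depends only on the non-sequence `w`, yet it is
    # recomputed at every step of the loop.
    out, _ = theano.scan(lambda x_t, w_: x_t * (w_ * 2),
                         sequences=[X], non_sequences=[w])

    # After: the equivalent graph computes `w * 2` once, outside the
    # loop, and passes the result in as a new non-sequence.
    w2 = w * 2
    out_opt, _ = theano.scan(lambda x_t, w2_: x_t * w2_,
                             sequences=[X], non_sequences=[w2])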
Example No. 3
    def process_node(self, fgraph, node):
        # This flag tells whether anything changed during the last iteration
        changed = True
        clean_inputs, clean_outputs = scan_utils.reconstruct_graph(
                        node.op.inputs, node.op.outputs)

        local_fgraph = gof.FunctionGraph(clean_inputs, clean_outputs)
        max_iterations = 2 * len(local_fgraph.toposort()) + 3
        counts = 0
        to_remove = []
        to_replace = []
        replace_with_in = []
        replace_with_out = []
        op = node.op
        # Construct the list of non_sequences to simplify a few things
        st = op.n_seqs
        st += int(numpy.sum([len(x) for x in
                             op.tap_array[:(op.n_mit_mot + op.n_mit_sot)]]))
        st += op.n_sit_sot
        st += op.n_shared_outs
        non_seqs = clean_inputs[st:]
        st = (op.n_seqs +
              op.n_mit_mot +
              op.n_mit_sot +
              op.n_sit_sot +
              op.n_nit_sot +
              op.n_shared_outs + 1)
        outer_non_seqs = node.inputs[st:]
        assert len(non_seqs) == len(outer_non_seqs)
        while changed and counts < max_iterations:
            counts += 1
            changed = False

            for nd in local_fgraph.toposort():
                if (numpy.all([(x in non_seqs) or
                               (x.owner in to_remove) or
                               isinstance(x, tensor.Constant)
                               for x in nd.inputs]) and
                        # we can do this because the assumption is that a
                        # ViewOp or DeepCopyOp will appear only at the end
                        # of the function and not somewhere in the middle
                        not isinstance(nd.op, theano.compile.ViewOp) and
                        not isinstance(nd.op, theano.compile.DeepCopyOp) and
                        # and we haven't already looked at this node
                        nd not in to_remove):

                    # We have a candidate node to remove.
                    # Step 1. Reconstruct it on the outside.
                    to_remove.append(nd)
                    outside_ins = []
                    for x in nd.inputs:
                        if x in non_seqs:
                            outside_ins += [outer_non_seqs[non_seqs.index(x)]]
                        elif x in to_replace:
                            outside_ins += [
                                replace_with_out[to_replace.index(x)]]
                        elif isinstance(x, theano.Constant):
                            outside_ins += [x.clone()]
                        else:
                            raise Exception(
                                ('Error in the `scan_pushout_non_seq_'
                                 'operations`. The optimization tried '
                                 'to move some computation from scan '
                                 'which is not allowed to move. Report '
                                 'this on the theano-users list.'), x)
                    outside_ins = [x.type.filter_variable(y) for x, y in
                                   zip(nd.inputs, outside_ins)]
                    nw_outer_node = nd.op.make_node(*outside_ins)
                    # Step 2. Create variables for replacements
                    for idx, y in enumerate(nd.outputs):

                        y_place_holder = scan_utils.safe_new(y, '_replace')
                        to_replace += [y]
                        replace_with_in += [y_place_holder]
                        assert type(y) == type(nw_outer_node.outputs[idx])
                        replace_with_out += [nw_outer_node.outputs[idx]]
                    changed = True
        if counts >= max_iterations:
            raise Exception('Error in the `scan_pushout_non_seq_operations`.'
                            ' The optimization exhausted the maximum number '
                            'of iterations allowed!')
        # We need to check all candidate replacements and choose those that
        # make sense for us

        # Step 1. Determine which elements of `to_replace` are used by the
        # remaining components of the inner function
        clean_to_replace = []
        clean_replace_with_in = []
        clean_replace_with_out = []
        existent_nodes = [nd for nd in local_fgraph.toposort()
                            if nd not in to_remove]
        to_keep = []
        for nd in existent_nodes:
            to_keep += nd.inputs
        for idx, out in enumerate(to_replace):
            if out in to_keep and out.owner not in existent_nodes:
                clean_to_replace += [out]
                clean_replace_with_in += [replace_with_in[idx]]
                clean_replace_with_out += [replace_with_out[idx]]

        if len(clean_to_replace) > 0:
            # We can finally put an end to all this madness
            givens = {}
            nw_outer = []
            nw_inner = []
            for to_repl, repl_in, repl_out in zip(clean_to_replace,
                                              clean_replace_with_in,
                                              clean_replace_with_out):
                if isinstance(repl_out, theano.Constant):
                    repl_in = repl_out.clone()
                else:
                    nw_inner += [repl_in]
                    nw_outer += [repl_out]
                givens[to_repl] = repl_in

            _op_outs = scan_utils.clone(clean_outputs,
                                        replace=givens)
            _op_ins = clean_inputs + nw_inner
            op_ins, op_outs = scan_utils.reconstruct_graph(_op_ins, _op_outs)
            # Reconstruct node
            nwScan = scan_op.Scan(op_ins, op_outs, op.info)
            nw_node = nwScan.make_node(*(node.inputs + nw_outer))
            fgraph.replace_all_validate_remove(
                zip(node.outputs, nw_node.outputs),
                remove=[node],
                reason='scan_push_computation_out')
            return True
        elif to_keep == []:
            # Nothing in the inner graph should be kept
            replace_with = {}
            for idx, out in enumerate(to_replace):
                if out in local_fgraph.outputs:
                    x = node.outputs[local_fgraph.outputs.index(out)]
                    y = replace_with_out[idx]
                    shape = [y.shape[i] for i in xrange(y.ndim)]
                    replace_with[x] = tensor.alloc(y,
                                                   node.inputs[0],
                                                   *shape)

            # We need to add one extra dimension to the outputs
            if replace_with:
                fgraph.replace_all_validate_remove(
                    replace_with.items(),
                    remove=[node],
                    reason='scan_push_computation_out')

        else:
            return False
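One notable difference from the earlier version of this pass is the x.type.filter_variable(y) coercion applied to the outer inputs before rebuilding the node. A minimal sketch of its behaviour (assuming standard Theano semantics; the variables are illustrative):

    import theano.tensor as tensor

    x = tensor.matrix('x')
    y = tensor.matrix('y')
    # Same type: filter_variable returns the variable unchanged.
    assert x.type.filter_variable(y) is y

    v = tensor.vector('v')
    # Incompatible type (wrong number of dimensions): TypeError.
    try:
        x.type.filter_variable(v)
    except TypeError:
        print('cannot use a vector where a matrix is expected')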
Example No. 4
    sit_sot_inner_outputs = []
    sit_sot_return_steps = OrderedDict()
    sit_sot_rightOrder = []

    # go through outputs picking up time slices as needed
    for i, init_out in enumerate(outs_info):
        # Note that our convention dictates that if an output uses
        # just the previous time step, then as an initial state we will
        # only provide a tensor of the same dimension as one time step;
        # this makes the code much cleaner for those who do not use taps.
        # Otherwise they would always have to shape_padleft the initial
        # state, which is ugly.
        if init_out.get("taps", None) == [-1]:

            actual_arg = init_out["initial"]
            arg = safe_new(init_out["initial"])
            if isinstance(arg, tensor.Constant):
                # safe_new returns a clone of the constant, but that is
                # not what we need for initial states
                arg = arg.type()

            # Try to transfer test_value to the new variable
            if config.compute_test_value != "off":
                try:
                    arg.tag.test_value = gof.Op._get_test_value(actual_arg)
                except AttributeError as e:
                    if config.compute_test_value != "ignore":
                        # No need to print a warning or raise an error now;
                        # it will be done when fn is called.
                        _logger.info(
                            ("Cannot compute test value for the "
                             "inner function of scan, input value "
                             "missing %s"), e)