def reduce_jacobian(J, i1, i2, idx, ish, o1, o2, odx, osh):
    """ Return the subportion of the Jacobian that is valid for a particular
    input and output slice.

    J: 2D ndarray
        Full Jacobian

    i1, i2: int, int
        Start and end index for the input variable

    o1, o2: int, int
        Start and end index for the output variable

    idx, odx: str, str
        Index strings for the input and output, if they are arrays. These
        are None if the entries in the Jacobian have already been sliced
        for us (this can happen with pseudoAssemblies), in which case no
        further slicing is needed here.

    ish, osh: tuples
        Shapes of the original input and output variables before being
        flattened.
    """

    if idx or odx:
        if idx: # J inputs
            istring, ix = flatten_slice(idx, ish, offset=i1, name='ix')
        else: # The entire array, already flat
            istring = 'i1:i2'

        if odx: # J outputs
            ostring, ox = flatten_slice(odx, osh, offset=o1, name='ox')
        else: # The entire array, already flat
            ostring = 'o1:o2'

        if ':' not in ostring and len(ox) > 1:
            ostring = 'vstack(%s)' % ostring
        if ':' not in istring and len(ix) > 1:
            istring = 'hstack(%s)' % istring

        return eval('J[%s, %s]' % (ostring, istring))
    else:
        return J[o1:o2, i1:i2]
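
A minimal usage sketch (not from the original source), assuming NumPy and the simple path where neither variable carries an index string, so the call falls through to the plain block slice J[o1:o2, i1:i2]; the array and variable names here are hypothetical.

import numpy as np

J_full = np.arange(36, dtype=float).reshape(6, 6)   # rows = outputs, cols = inputs

# Output variable occupies rows 0:2, input variable occupies columns 3:6.
J_block = reduce_jacobian(J_full, i1=3, i2=6, idx=None, ish=(3,),
                          o1=0, o2=2, odx=None, osh=(2,))
assert J_block.shape == (2, 3)
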
    def test_general_flatten_slice(self):
        # Test capability to flatten any slice.
        shape = (5,)
        index = "[1]"
        flat_str, ii = flatten_slice(index, shape, name="ii")

        self.assertTrue(flat_str == "ii:ii+1")
        self.assertEqual(ii, 1)

        shape = (3, 4)
        index = "[1, 2]"
        flat_str, ii = flatten_slice(index, shape, name="ii")

        self.assertTrue(flat_str == "ii:ii+1")
        self.assertEqual(ii, 6)

        shape = (9, 7)
        index = "[-1, -1]"
        flat_str, ii = flatten_slice(index, shape, name="ii")

        self.assertTrue(flat_str == "ii")
        self.assertEqual(ii, 62)

        shape = (4, 7)
        index = "[:, 3]"
        flat_str, ii = flatten_slice(index, shape, name="ii")

        self.assertTrue(flat_str == "ii")
        self.assertTrue(set(ii) == set([3, 10, 17, 24]))

        shape = (50,)
        index = "[-2]"
        flat_str, ii = flatten_slice(index, shape, name="ii")

        self.assertTrue(flat_str == "ii")
        self.assertEqual(ii, 48)

        shape = (50,)
        index = "[3:-3:5]"
        flat_str, ii = flatten_slice(index, shape, name="ii")

        self.assertTrue(flat_str == "ii")
        self.assertTrue(set(ii) == set([3, 8, 13, 18, 23, 28, 33, 38, 43]))
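
The expected flat offsets in the test above follow NumPy's row-major (C-order) flattening. A short independent check of that arithmetic, assuming only NumPy (illustrative, not part of the original test case):

import numpy as np

assert np.ravel_multi_index((1, 2), (3, 4)) == 6      # '[1, 2]' in shape (3, 4)
assert np.ravel_multi_index((8, 6), (9, 7)) == 62     # '[-1, -1]' in shape (9, 7)
assert [int(np.ravel_multi_index((r, 3), (4, 7))) for r in range(4)] == [3, 10, 17, 24]
assert list(range(50))[3:-3:5] == [3, 8, 13, 18, 23, 28, 33, 38, 43]
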
    def initialize_residual(self):
        """Creates the array that stores the residual. Also returns the
        number of edges.
        """
        dgraph = self.derivative_graph()
        if 'mapped_inputs' in dgraph.graph:
            inputs = dgraph.graph['mapped_inputs']
        else:
            inputs = dgraph.graph['inputs']

        basevars = set()
        edges = self.edge_list()
        implicit_edges = self.get_implicit_info()
        sortedkeys = sorted(implicit_edges)
        sortedkeys.extend(sorted(edges.keys()))

        nEdge = 0
        for src in sortedkeys:

            if src in implicit_edges:
                targets = implicit_edges[src]
                is_implicit = True
            else:
                targets = edges[src]
                is_implicit = False

            if isinstance(targets, str):
                targets = [targets]

            # Implicit source edges are tuples.
            if is_implicit:
                impli_edge = nEdge
                for resid in src:
                    unmap_src = from_PA_var(resid)

                    val = self.scope.get(unmap_src)
                    width = flattened_size(unmap_src, val, self.scope)

                    if isinstance(val, ndarray):
                        shape = val.shape
                    else:
                        shape = 1

                    bound = (impli_edge, impli_edge + width)
                    self.set_bounds(resid, bound)
                    basevars.add(resid)
                    impli_edge += width

            # Regular components
            else:

                # Only need to grab the source (or first target for param) to
                # figure out the size for the residual vector
                measure_src = src
                if '@in' in src:
                    idx = int(src[3:].split('[')[0])
                    inp = inputs[idx]
                    if not isinstance(inp, basestring):
                        inp = inp[0]
                    if inp in dgraph:
                        measure_src = inp
                    else:
                        measure_src = targets[0]
                elif src == '@fake':
                    for t in targets:
                        if not t.startswith('@'):
                            measure_src = t
                            break
                    else:
                        raise RuntimeError("malformed graph!")

                # Find our width, etc.
                unmap_src = from_PA_var(measure_src)
                val = self.scope.get(unmap_src)
                width = flattened_size(unmap_src, val, self.scope)
                if isinstance(val, ndarray):
                    shape = val.shape
                else:
                    shape = 1

                # Special poke for boundary node
                if is_boundary_node(dgraph, measure_src) or \
                   is_boundary_node(dgraph, dgraph.base_var(measure_src)):
                    bound = (nEdge, nEdge + width)
                    self.set_bounds(measure_src, bound)

                src_noidx = src.split('[', 1)[0]

                # Poke our source data

                # Array slice of src that is already allocated
                if '[' in src and src_noidx in basevars:
                    _, _, idx = src.partition('[')
                    basebound = self.get_bounds(src_noidx)
                    if '@in' not in src_noidx:
                        unmap_src = from_PA_var(src_noidx)
                        val = self.scope.get(unmap_src)
                        shape = val.shape
                    offset = basebound[0]
                    istring, ix = flatten_slice(idx,
                                                shape,
                                                offset=offset,
                                                name='ix')
                    bound = (istring, ix)
                    # Already allocated
                    width = 0

                # Input-input connection to implicit state
                elif src_noidx in basevars:
                    bound = self.get_bounds(src_noidx)
                    width = 0

                # Normal src
                else:
                    bound = (nEdge, nEdge + width)

                self.set_bounds(src, bound)
                basevars.add(src)

            # Poke our target data
            impli_edge = nEdge
            for target in targets:

                # Handle States in implicit comps
                if is_implicit:

                    if isinstance(target, str):
                        target = [target]

                    unmap_targ = from_PA_var(target[0])
                    val = self.scope.get(unmap_targ)
                    imp_width = flattened_size(unmap_targ, val, self.scope)
                    if isinstance(val, ndarray):
                        shape = val.shape
                    else:
                        shape = 1

                    for itarget in target:
                        bound = (impli_edge, impli_edge + imp_width)
                        self.set_bounds(itarget, bound)
                        basevars.add(itarget)

                    impli_edge += imp_width
                    width = impli_edge - nEdge

                elif not target.startswith('@'):
                    self.set_bounds(target, bound)

            #print input_src, src, target, bound,
            nEdge += width
            impli_edge = nEdge

        # Initialize the residual vector on the first time through, and also
        # if for some reason the number of edges has changed.
        if self.res is None or nEdge != self.res.shape[0]:
            self.res = zeros((nEdge, 1))

        return nEdge
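
A stripped-down sketch of the bookkeeping pattern initialize_residual applies, assuming hypothetical variable names and widths: each flattened variable gets a contiguous (start, end) bound in a single flat residual vector, and the running total becomes the vector's length. This only illustrates the allocation idea, not the graph handling above.

import numpy as np

def allocate_bounds(widths):
    """Assign contiguous (start, end) bounds in a flat vector.

    `widths` maps variable name -> flattened size, analogous to the
    flattened_size() results used above.
    """
    bounds = {}
    n_edge = 0
    for name, width in sorted(widths.items()):
        bounds[name] = (n_edge, n_edge + width)
        n_edge += width
    return bounds, n_edge

bounds, n_edge = allocate_bounds({'comp1.y': 3, 'comp2.x': 4, 'comp3.state': 2})
res = np.zeros((n_edge, 1))        # analogous to self.res
start, end = bounds['comp2.x']
res[start:end, 0] = 1.0            # poke one variable's slice of the residual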