Example #1
def lusubs(lower, upper, bvector, permlist):
    """
    Forward and back substitution for a linear system whose coefficient 
    matrix has been LU decomposed, with row permutations given by permlist.
    
    NB. bvector and permlist are just lists, not matrix type vectors. 
    """

    # Check input matrices and vectors/lists for inconsistencies
    ndiml = squaredim(lower, 'lusubs')
    ndimu = squaredim(upper, 'lusubs')

    errortext1 = "lower and upper have different dimensions in lusubs!"
    assert ndimu == ndiml, errortext1
    nb    = len(bvector)
    errortext2 = "inconsistent dimensions in matrices and vector in lusubs!"
    assert nb == ndiml, errortext2

    errortext3 = "inconsistent length of permutation list in lusubs!"
    assert len(permlist) == nb, errortext3
    cvector = reorder(bvector, permlist)


    # First do forward substitution with lower matrix to 
    # create intermediate vector (yvector)
    yvector = array('d', nb*[0.0])
    divisor = float(lower[0][0])
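    # Guard against a near-singular pivot: any divisor smaller than TINY in
    # magnitude is clamped to +/-TINY before dividing (same pattern below)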
    if abs(divisor) < TINY: divisor = fsign(divisor)*TINY
    yvector[0] = cvector[0] / divisor
    for i in range(1, nb):
        summ = 0.0
        for j in range(0, i):  summ += lower[i][j]*yvector[j]
        divisor = float(lower[i][i])
        if abs(divisor) < TINY: divisor = fsign(divisor)*TINY
        yvector[i] = (cvector[i]-summ) / divisor

    # Then do backward substitution using upper matrix and intermediate 
    # vector to achieve the final result
    xvector = array('d', nb*[0.0])
    nbm1 = nb - 1
    divisor = float(upper[nbm1][nbm1])
    if abs(divisor) < TINY: divisor = fsign(divisor)*TINY
    xvector[nbm1] = yvector[nbm1] / divisor
    nbm2 = nbm1 - 1
    for i in range(nbm2, -1, -1):
        summ = 0.0
        ip1  = i + 1
        for j in range(ip1, nb):  summ += upper[i][j]*xvector[j]
        divisor = float(upper[i][i])
        if abs(divisor) < TINY: divisor = fsign(divisor)*TINY
        xvector[i] = (yvector[i]-summ) / divisor


    return xvector
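
# --- Illustrative sketch (not part of the original module) ---
# A self-contained rendering of the same forward/back substitution, using
# plain lists instead of the module's helpers (squaredim, reorder, TINY,
# fsign), so the logic can be tried in isolation. It assumes, as lusubs
# appears to, that perm[i] gives the source index of row i after pivoting.

def solve_from_lu(lower, upper, b, perm):
    n = len(b)
    c = [b[k] for k in perm]                 # apply the row permutation to b

    # Forward substitution: solve L*y = c
    y = [0.0] * n
    for i in range(n):
        s = sum(lower[i][j] * y[j] for j in range(i))
        y[i] = (c[i] - s) / lower[i][i]

    # Back substitution: solve U*x = y
    x = [0.0] * n
    for i in range(n - 1, -1, -1):
        s = sum(upper[i][j] * x[j] for j in range(i + 1, n))
        x[i] = (y[i] - s) / upper[i][i]
    return x

# Example: L*U = [[2, 1], [4, 4]] with no row swaps
low = [[1.0, 0.0], [2.0, 1.0]]
up  = [[2.0, 1.0], [0.0, 2.0]]
print(solve_from_lu(low, up, [3.0, 10.0], [0, 1]))   # -> [0.5, 2.0]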
Example #2
def lusubs(lower, upper, bvector, permlist):
    """
    Forward and back substitution for a linear system whose coefficient 
    matrix has been LU decomposed, with row permutations given by permlist.
    
    NB. bvector and permlist are just lists, not matrix type vectors. 
    """

    # Check input matrices and vectors/lists for inconsistencies
    ndiml = squaredim(lower, 'lusubs')
    ndimu = squaredim(upper, 'lusubs')

    errortext1 = "lower and upper have different dimensions in lusubs!"
    assert ndimu == ndiml, errortext1
    nb = len(bvector)
    errortext2 = "inconsistent dimensions in matrices and vector in lusubs!"
    assert nb == ndiml, errortext2

    errortext3 = "inconsistent length of permutation list in lusubs!"
    assert len(permlist) == nb, errortext3
    cvector = reorder(bvector, permlist)

    # First do forward substitution with lower matrix to
    # create intermediate vector (yvector)
    yvector = array('d', nb * [0.0])
    divisor = float(lower[0][0])
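    # Guard against a near-singular pivot: any divisor smaller than TINY in
    # magnitude is clamped to +/-TINY before dividing (same pattern below)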
    if abs(divisor) < TINY: divisor = fsign(divisor) * TINY
    yvector[0] = cvector[0] / divisor
    for i in range(1, nb):
        summ = 0.0
        for j in range(0, i):
            summ += lower[i][j] * yvector[j]
        divisor = float(lower[i][i])
        if abs(divisor) < TINY: divisor = fsign(divisor) * TINY
        yvector[i] = (cvector[i] - summ) / divisor

    # Then do backward substitution using upper matrix and intermediate
    # vector to achieve the final result
    xvector = array('d', nb * [0.0])
    nbm1 = nb - 1
    divisor = float(upper[nbm1][nbm1])
    if abs(divisor) < TINY: divisor = fsign(divisor) * TINY
    xvector[nbm1] = yvector[nbm1] / divisor
    nbm2 = nbm1 - 1
    for i in range(nbm2, -1, -1):
        summ = 0.0
        ip1 = i + 1
        for j in range(ip1, nb):
            summ += upper[i][j] * xvector[j]
        divisor = float(upper[i][i])
        if abs(divisor) < TINY: divisor = fsign(divisor) * TINY
        xvector[i] = (yvector[i] - summ) / divisor

    return xvector
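
# --- Illustrative sketch (not part of the original module) ---
# A rough sketch of the kind of factorization whose output lusubs consumes:
# Doolittle LU decomposition with partial pivoting, returning a unit lower
# triangular matrix, an upper triangular matrix, and the permutation list
# that records the row swaps. This is NOT the module's own decomposition
# routine, just an illustration of how the three arguments fit together
# (assumes a nonsingular matrix; no TINY clamping is done here).

def lu_decompose(matrix):
    n = len(matrix)
    a = [list(row) for row in matrix]        # work on a copy
    perm = list(range(n))

    for k in range(n):
        # Partial pivoting: move the largest remaining pivot into row k
        pivot = max(range(k, n), key=lambda r: abs(a[r][k]))
        if pivot != k:
            a[k], a[pivot] = a[pivot], a[k]
            perm[k], perm[pivot] = perm[pivot], perm[k]
        # Eliminate below the pivot, storing the multipliers in place
        for i in range(k + 1, n):
            a[i][k] /= a[k][k]
            for j in range(k + 1, n):
                a[i][j] -= a[i][k] * a[k][j]

    lower = [[a[i][j] if j < i else (1.0 if i == j else 0.0) for j in range(n)]
             for i in range(n)]
    upper = [[a[i][j] if j >= i else 0.0 for j in range(n)] for i in range(n)]
    return lower, upper, perm

low, up, p = lu_decompose([[0.0, 2.0], [3.0, 1.0]])
# low, up and p can then be passed to lusubs together with a right-hand side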
Example #3
    def lhs_sample(self, nparams, nintervals, rcorrmatrix=None, checklevel=0):

        """
        Generates a full Latin Hypercube Sample of uniformly distributed 
        random variates in [0.0, 1.0] placed in a matrix with one realization 
        in each row. A target rank correlation matrix can be given (must have 
        the dimension nparams*nparams).
        
        checklevel may be 0, 1 or 2 and is used to control trace printout. 
        0 produces no trace output, whereas 2 produces the most.

        NB. IN ORDER FOR LATIN HYPERCUBE SAMPLING TO BE MEANINGFUL THE OUTPUT 
        STREAM OF RANDOM VARIATES MUST BE HANDLED BY INVERSE METHODS !!!! 

        Latin Hypercube Sampling was first described by McKay, Conover & 
        Beckman in a 1979 Technometrics article. The use of the LHS technique 
        to introduce rank correlations was first described by Iman & Conover 
        (1982) in Communications in Statistics.
        """

        # lhs_sample uses the Matrix class to a great extent

        if nparams > nintervals:
            warn("nparams > nintervals in RandomStructure.lhs_sample")

        nsamples     = nintervals   # Just to remember
        rstreaminner = self.rstream
        rstreamouter = self.rstream2

        factor  =  1.0 / float(nintervals)

        tlhsmatrix1 = Matrix()  # tlhsmatrix1 belongs to the Matrix class
        if rcorrmatrix: tscorematrix = Matrix()
        for k in range(0, nparams):
            if rcorrmatrix:
                tnvector, tscorevector = \
                            self.scramble_range(nsamples, rstreamouter, True)
                rowk = array('d', tscorevector)
                tscorematrix.append(rowk)
            else:
                tnvector = self.scramble_range(nsamples, rstreamouter)
            pvector = array('d', [])
            for number in tnvector:
                p  =  factor * (float(number) + rstreaminner.runif01())
                p  =  max(p, 0.0) # Probabilities must be in [0.0, 1.0]
                p  =  min(p, 1.0)
                pvector.append(p)
            tlhsmatrix1.append(pvector)
                
        
        # tlhsmatrix1 (and tscorematrix) are now transposed to run with
        # one subsample per row, matching the output format as well as the
        # Iman-Conover formulation. tlhsmatrix1 and tscorematrix are still
        # used below for some manipulations that are simpler when the
        # matrices run with one variable per row

        lhsmatrix1  = transposed(tlhsmatrix1)
        if rcorrmatrix: scorematrix = transposed(tscorematrix)

        if checklevel == 2:
            print("lhs_sample: Original LHS sample matrix")
            mxdisplay(lhsmatrix1)
            if rcorrmatrix: 
                print("lhs_sample: Target rank correlation matrix")
                mxdisplay(rcorrmatrix)
        if checklevel == 1 or checklevel == 2:
            print("lhs_sample: Rank correlation matrix of")
            print("            original LHS sample matrix")
            trankmatrix1 = Matrix()
            for k in range (0, nparams):
                rowk = array('d', extract_ranks(tlhsmatrix1[k]))
                trankmatrix1.append(rowk)
            mxdisplay(Matrix(corrmatrix(trankmatrix1)))

        if not rcorrmatrix:
            return lhsmatrix1

        else:
            scorecorr = Matrix(corrmatrix(tscorematrix))
            if checklevel == 2:
                print("lhs_sample: Score matrix of original LHS sample matrix")
                mxdisplay(scorematrix)
                print("lhs_sample: Correlation matrix of scores of")
                print("            original LHS sample")
                mxdisplay(scorecorr)

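            # Iman-Conover step: multiply the scores by the transposed
            # inverse Cholesky factor of their own correlation matrix to
            # strip their incidental correlation, then impose the target
            # correlation via the Cholesky factor of rcorrmatrix below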
            slower, slowert = ludcmp_chol(scorecorr)
            slowerinverse   = inverted(slower)
            tslowerinverse  = transposed(slowerinverse)
            clower, clowert = ludcmp_chol(rcorrmatrix)
            scoresnostar    = scorematrix*tslowerinverse # Matrix multiplication
            if checklevel == 2:
                print("lhs_sample: Correlation matrix of scoresnostar")
                mxdisplay(corrmatrix(transposed(scoresnostar)))

            scoresstar  = scoresnostar*clowert    # Matrix multiplication
            tscoresstar = transposed(scoresstar)
            trankmatrix = Matrix()
            for k in range (0, nparams):
                trankmatrix.append(extract_ranks(tscoresstar[k]))
            if checklevel == 2:
                print("lhs_sample: scoresstar matrix")
                mxdisplay(scoresstar)
                print("lhs_sample: Correlation matrix of scoresstar")
                mxdisplay(corrmatrix(tscoresstar))
                print("lhs_sample: scoresstar matrix converted to rank")
                mxdisplay(transposed(trankmatrix))
                for k in range(0, nparams):
                    tlhsmatrix1[k] = array('d', sorted(list(tlhsmatrix1[k])))
                print("RandomStructure.lhs_sample: Sorted LHS sample matrix")
                mxdisplay(transposed(tlhsmatrix1))

            tlhsmatrix2 = Matrix()
            for k in range(0, nparams):
                # Sort each row in tlhsmatrix1 and reorder 
                # according to trankmatrix rows
                auxvec = reorder(tlhsmatrix1[k], trankmatrix[k], \
                                                 straighten=True)
                tlhsmatrix2.append(auxvec)
            lhsmatrix2 = transposed(tlhsmatrix2)
            if checklevel == 2:
                print("lhs_sample: Corrected/reordered LHS sample matrix")
                mxdisplay(transposed(tlhsmatrix2))

            if checklevel == 1 or checklevel == 2:
                trankmatrix2 = Matrix()
                auxmatrix2   = tlhsmatrix2
                for k in range (0, nparams):
                    trankmatrix2.append(extract_ranks(auxmatrix2[k]))
                print("lhs_sample: Rank correlation matrix of corrected/")
                print("            /reordered LHS sample matrix")
                mxdisplay(corrmatrix(trankmatrix2))


            return lhsmatrix2
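
# --- Illustrative sketch (not part of the original class) ---
# Plain Latin Hypercube Sampling without the rank-correlation machinery,
# mirroring the stratification loop above: each parameter gets exactly one
# uniform draw from each of the nintervals equally wide strata of [0, 1],
# and the stratum order is scrambled independently per parameter. The
# standard library rng stands in for the class's rstream/rstream2 streams.

import random

def lhs_uniform(nparams, nintervals, seed=12345):
    rng = random.Random(seed)
    width = 1.0 / nintervals
    columns = []
    for _ in range(nparams):
        strata = list(range(nintervals))
        rng.shuffle(strata)                       # scramble the interval order
        columns.append([(k + rng.random()) * width for k in strata])
    # Transpose so that each row is one realization, as in lhs_sample
    return [list(row) for row in zip(*columns)]

sample = lhs_uniform(nparams=3, nintervals=5)     # 5 rows, 3 columns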
Example #4
    def lhs_sample(self, nparams, nintervals, rcorrmatrix=None, checklevel=0):
        """
        Generates a full Latin Hypercube Sample of uniformly distributed 
        random variates in [0.0, 1.0] placed in a matrix with one realization 
        in each row. A target rank correlation matrix can be given (must have 
        the dimension nparams*nparams).
        
        checklevel may be 0, 1 or 2 and is used to control trace printout. 
        0 produces no trace output, whereas 2 produces the most.

        NB. IN ORDER FOR LATIN HYPERCUBE SAMPLING TO BE MEANINGFUL THE OUTPUT 
        STREAM OF RANDOM VARIATES MUST BE HANDLED BY INVERSE METHODS !!!! 

        Latin Hypercube Sampling was first described by McKay, Conover & 
        Beckman in a 1979 Technometrics article. The use of the LHS technique 
        to introduce rank correlations was first described by Iman & Conover 
        (1982) in Communications in Statistics.
        """

        # lhs_sample uses the Matrix class to a great extent

        if nparams > nintervals:
            warn("nparams > nintervals in RandomStructure.lhs_sample")

        nsamples = nintervals  # Just to remember
        rstreaminner = self.rstream
        rstreamouter = self.rstream2

        factor = 1.0 / float(nintervals)

        tlhsmatrix1 = Matrix()  # tlhsmatrix1 belongs to the Matrix class
        if rcorrmatrix: tscorematrix = Matrix()
        for k in range(0, nparams):
            if rcorrmatrix:
                tnvector, tscorevector = \
                            self.scramble_range(nsamples, rstreamouter, True)
                rowk = array('d', tscorevector)
                tscorematrix.append(rowk)
            else:
                tnvector = self.scramble_range(nsamples, rstreamouter)
            pvector = array('d', [])
            for number in tnvector:
                p = factor * (float(number) + rstreaminner.runif01())
                p = max(p, 0.0)  # Probabilities must be in [0.0, 1.0]
                p = min(p, 1.0)
                pvector.append(p)
            tlhsmatrix1.append(pvector)

        # tlhsmatrix1 (and tscorematrix) are now transposed to run with
        # one subsample per row, matching the output format as well as the
        # Iman-Conover formulation. tlhsmatrix1 and tscorematrix are still
        # used below for some manipulations that are simpler when the
        # matrices run with one variable per row

        lhsmatrix1 = transposed(tlhsmatrix1)
        if rcorrmatrix: scorematrix = transposed(tscorematrix)

        if checklevel == 2:
            print("lhs_sample: Original LHS sample matrix")
            mxdisplay(lhsmatrix1)
            if rcorrmatrix:
                print("lhs_sample: Target rank correlation matrix")
                mxdisplay(rcorrmatrix)
        if checklevel == 1 or checklevel == 2:
            print("lhs_sample: Rank correlation matrix of")
            print("            original LHS sample matrix")
            trankmatrix1 = Matrix()
            for k in range(0, nparams):
                rowk = array('d', extract_ranks(tlhsmatrix1[k]))
                trankmatrix1.append(rowk)
            mxdisplay(Matrix(corrmatrix(trankmatrix1)))

        if not rcorrmatrix:
            return lhsmatrix1

        else:
            scorecorr = Matrix(corrmatrix(tscorematrix))
            if checklevel == 2:
                print("lhs_sample: Score matrix of original LHS sample matrix")
                mxdisplay(scorematrix)
                print("lhs_sample: Correlation matrix of scores of")
                print("            original LHS sample")
                mxdisplay(scorecorr)

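            # Iman-Conover step: multiply the scores by the transposed
            # inverse Cholesky factor of their own correlation matrix to
            # strip their incidental correlation, then impose the target
            # correlation via the Cholesky factor of rcorrmatrix below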
            slower, slowert = ludcmp_chol(scorecorr)
            slowerinverse = inverted(slower)
            tslowerinverse = transposed(slowerinverse)
            clower, clowert = ludcmp_chol(rcorrmatrix)
            scoresnostar = scorematrix * tslowerinverse  # Matrix multiplication
            if checklevel == 2:
                print("lhs_sample: Correlation matrix of scoresnostar")
                mxdisplay(corrmatrix(transposed(scoresnostar)))

            scoresstar = scoresnostar * clowert  # Matrix multiplication
            tscoresstar = transposed(scoresstar)
            trankmatrix = Matrix()
            for k in range(0, nparams):
                trankmatrix.append(extract_ranks(tscoresstar[k]))
            if checklevel == 2:
                print("lhs_sample: scoresstar matrix")
                mxdisplay(scoresstar)
                print("lhs_sample: Correlation matrix of scoresstar")
                mxdisplay(corrmatrix(tscoresstar))
                print("lhs_sample: scoresstar matrix converted to rank")
                mxdisplay(transposed(trankmatrix))
                for k in range(0, nparams):
                    tlhsmatrix1[k] = array('d', sorted(list(tlhsmatrix1[k])))
                print("RandomStructure.lhs_sample: Sorted LHS sample matrix")
                mxdisplay(transposed(tlhsmatrix1))

            tlhsmatrix2 = Matrix()
            for k in range(0, nparams):
                # Sort each row in tlhsmatrix1 and reorder
                # according to trankmatrix rows
                auxvec = reorder(tlhsmatrix1[k], trankmatrix[k], \
                                                 straighten=True)
                tlhsmatrix2.append(auxvec)
            lhsmatrix2 = transposed(tlhsmatrix2)
            if checklevel == 2:
                print("lhs_sample: Corrected/reordered LHS sample matrix")
                mxdisplay(transposed(tlhsmatrix2))

            if checklevel == 1 or checklevel == 2:
                trankmatrix2 = Matrix()
                auxmatrix2 = tlhsmatrix2
                for k in range(0, nparams):
                    trankmatrix2.append(extract_ranks(auxmatrix2[k]))
                print("lhs_sample: Rank correlation matrix of corrected/")
                print("            /reordered LHS sample matrix")
                mxdisplay(corrmatrix(trankmatrix2))

            return lhsmatrix2
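
# --- Illustrative sketch (not part of the original class) ---
# The final Iman-Conover step in isolation: rearrange one variable's sample
# values so that their ranks match a target rank pattern (trankmatrix[k] in
# the method above). The marginal values are untouched; only their order,
# and hence the rank correlation with the other variables, changes. Ranks
# are taken as 0-based positions here, which may differ from the module's
# extract_ranks/reorder conventions.

def rank_reorder(values, target_ranks):
    ordered = sorted(values)
    out = [0.0] * len(values)
    for position, rank in enumerate(target_ranks):
        out[position] = ordered[rank]             # value with this rank goes here
    return out

print(rank_reorder([0.91, 0.12, 0.55, 0.33], [2, 0, 3, 1]))
# -> [0.55, 0.12, 0.91, 0.33]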