Example No. 1
    def __init__(self,
                 fgainmatrixC,
                 fvariablenamesC,
                 fconnectionmatrixC,
                 bgainmatrixC,
                 bvariablenamesC,
                 bconnectionmatrixC,
                 nodummyvariablelistC,
                 fgainmatrix,
                 fvariablenames,
                 fconnectionmatrix,
                 bgainmatrix,
                 bvariablenames,
                 bconnectionmatrix,
                 alpha=0.35):
        """This constructor will:
        1) create a graph with associated node importances based on local gain information
        2) create a graph with associated node importances based on partial correlation data"""

        self.forwardgain = gRanking(self.normaliseMatrix(fgainmatrixC),
                                    fvariablenamesC)
        self.backwardgain = gRanking(self.normaliseMatrix(bgainmatrixC),
                                     bvariablenamesC)
        self.forwardgainNC = gRanking(self.normaliseMatrix(fgainmatrix),
                                      fvariablenames)
        self.backwardgainNC = gRanking(self.normaliseMatrix(bgainmatrix),
                                       bvariablenames)
        self.createBlendedRanking(nodummyvariablelistC, alpha)
        self.createBlendedRankingNoControl(nodummyvariablelistC, alpha)
Example No. 2
 def __init__(self, fgainmatrixC, fvariablenamesC, fconnectionmatrixC, bgainmatrixC, bvariablenamesC, bconnectionmatrixC, nodummyvariablelistC, fgainmatrix, fvariablenames, fconnectionmatrix, bgainmatrix, bvariablenames, bconnectionmatrix, alpha = 0.35 ):
     """This constructor will:
     1) create a graph with associated node importances based on local gain information
     2) create a graph with associated node importances based on partial correlation data"""
     
     self.forwardgain = gRanking(self.normaliseMatrix(fgainmatrixC), fvariablenamesC)      
     self.backwardgain = gRanking(self.normaliseMatrix(bgainmatrixC), bvariablenamesC)
     self.forwardgainNC = gRanking(self.normaliseMatrix(fgainmatrix), fvariablenames)      
     self.backwardgainNC = gRanking(self.normaliseMatrix(bgainmatrix), bvariablenames)
     self.createBlendedRanking(nodummyvariablelistC, alpha)
     self.createBlendedRankingNoControl(nodummyvariablelistC, alpha)
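Both constructors above push every gain matrix through a normaliseMatrix helper before handing it to gRanking. Below is a minimal sketch of what such a helper plausibly does, assuming it rescales each column to sum to one so the result is column-stochastic; the function is an illustrative stand-in, not the project's actual implementation.

import numpy as np

def normalise_matrix(matrix):
    # Illustrative stand-in for normaliseMatrix (assumption): rescale each
    # column of the absolute gains so it sums to 1; all-zero columns stay zero.
    m = np.abs(np.asarray(matrix, dtype=float))
    colsums = m.sum(axis=0)
    colsums[colsums == 0] = 1.0  # avoid division by zero for empty columns
    return m / colsums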
Example No. 3
    def testGRankInputMatrixThree(self):
        mat1 = [[0, 0, 0, 0, 0, 0, 1.0 / 3, 0],
                [1.0 / 2, 0, 1.0 / 2, 1.0 / 3, 0, 0, 0, 0],
                [1.0 / 2, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0, 0, 0],
                [0, 0, 1.0 / 2, 1.0 / 3, 0, 0, 1.0 / 3, 0],
                [0, 0, 0, 1.0 / 3, 1.0 / 3, 0, 0, 1.0 / 2],
                [0, 0, 0, 0, 1.0 / 3, 0, 0, 1.0 / 2],
                [0, 0, 0, 0, 1.0 / 3, 1, 1.0 / 3, 0]]
        mat2 = ['var1', 'var2', 'var3', 'var4', 'var5', 'var6', 'var7', 'var8']

        testThree = gRanking(mat1, mat2)  #create

        from numpy import shape  #check if gain matrix is square
        [row, col] = shape(testThree.gMatrix)
        self.assertNotEqual(row, testThree.n + 1,
                            "The matrix is not square: rows")
        self.assertNotEqual(col, testThree.n + 1,
                            "The matrix is not square: columns")

        acc = 5  #req accuracy

        for i in range(testThree.n):  #check if stochastic
            self.assertAlmostEqual(sum(testThree.gMatrix[:, i]), 1.0, acc)

        self.assertAlmostEqual(testThree.maxeig, 1.0, acc)  #check max eig is 1
        """I'm not sure what the exact expected output is but it looks reasonable to me..."""
Example No. 4
    def testGRankInputMatrixTwo(self):
        # this test contains a disconnected graph (2 components)
        mat1 = [[0, 1, 0, 0, 0], [1, 0, 0, 0, 0], [0, 0, 0, 1, 0.5],
                [0, 0, 1, 0, 0.5], [0, 0, 0, 0, 0]]
        mat2 = ['var1', 'var2', 'var3', 'var4', 'var5']

        testTwo = gRanking(mat1, mat2)  #create

        from numpy import shape  #check if gain matrix is square
        [row, col] = shape(testTwo.gMatrix)
        self.assertNotEqual(row, testTwo.n + 1,
                            "The matrix is not square: rows")
        self.assertNotEqual(col, testTwo.n + 1,
                            "The matrix is not square: columns")

        acc = 5  #req accuracy

        for i in range(testTwo.n):  #check if stochastic
            self.assertAlmostEqual(sum(testTwo.gMatrix[:, i]), 1.0, acc)

        self.assertAlmostEqual(testTwo.maxeig, 1.0, acc)  #check max eig is 1

        expectedValues = [0.2, 0.2, 0.285, 0.285, 0.03]  # check if output is believable
        for calculated, expected in zip(testTwo.rankArray, expectedValues):
            self.assertAlmostEqual(calculated, expected, acc)
Example No. 5
    def testGRankInputMatrixOne(self):
        mat1 = [[0, 0, 1, 0.5], [1.0 / 3, 0, 0, 0],
                [1.0 / 3, 1.0 / 2, 0, 1.0 / 2], [1.0 / 3, 1.0 / 2, 0, 0]]
        mat2 = ['var1', 'var2', 'var3', 'var4']

        testOne = gRanking(mat1, mat2)

        from numpy import shape  #check if gain matrix is square
        [row, col] = shape(testOne.gMatrix)
        self.assertNotEqual(row, testOne.n + 1,
                            "The matrix is not square: rows")
        self.assertNotEqual(col, testOne.n + 1,
                            "The matrix is not square: columns")

        digits = 5  # number of digits of accuracy

        # check if all columns of the gain matrix sum to 1 (otherwise the matrix
        # is not stochastic and the solution is not meaningful)
        for i in range(testOne.n):
            self.assertAlmostEqual(sum(testOne.gMatrix[:, i]), 1.0, digits)

        self.assertAlmostEqual(testOne.maxeig, 1.0, digits)

        expectedvalues = [0.36815068, 0.14180936, 0.28796163, 0.20207834]

        # zip order matches the loop variable names: calculated from rankArray, expected from expectedvalues
        for calculated, expected in zip(testOne.rankArray, expectedvalues):
            self.assertAlmostEqual(calculated, expected, digits)
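For reference, the column-sum check and the unit-eigenvalue check above are two sides of the same standard fact about column-stochastic matrices (here M stands for testOne.gMatrix):

\[
\mathbf{1}^{\top} M = \mathbf{1}^{\top}
\;\Rightarrow\;
\rho(M) = 1
\quad\text{and}\quad
\exists\, r \ge 0 :\; M r = r,\ \ \textstyle\sum_i r_i = 1 .
\]

The expectedvalues list is then consistent with rankArray being such an eigenvector of gMatrix, rescaled to sum to one.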
Example No. 6
    def testNRankInputMatrixThree(self):

        mat1 = [[0, 0, 0, 0, 0, 0, 1.0 / 3, 0],
                [1.0 / 2, 0, 1.0 / 2, 1.0 / 3, 0, 0, 0, 0],
                [1.0 / 2, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0, 0, 0],
                [0, 0, 1.0 / 2, 1.0 / 3, 0, 0, 1.0 / 3, 0],
                [0, 0, 0, 1.0 / 3, 1.0 / 3, 0, 0, 1.0 / 2],
                [0, 0, 0, 0, 1.0 / 3, 0, 0, 1.0 / 2],
                [0, 0, 0, 0, 1.0 / 3, 1, 1.0 / 3, 0]]
        mat2 = ['var1', 'var2', 'var3', 'var4', 'var5', 'var6', 'var7', 'var8']
        mat3 = [1, 2, 1, 1, 2, 1, 1, 1]  # var2 and var5 are twice as important as the other variables

        #test to see if nRanking defaults to gRanking if alpha = 0
        test = nRanking(mat1, mat2, 0, mat3)  # alpha = 0: intrinsic node importance is ignored
        testCompare = gRanking(mat1, mat2)

        for inclIntrinsic, exclIntrinsic in zip(test.rankArray,
                                                testCompare.rankArray):
            self.assertAlmostEqual(inclIntrinsic,
                                   exclIntrinsic,
                                   msg="Does not simplify to gRanking")

        #test to see if important node is more important using this algorithm
        test = nRanking(mat1, mat2, 0.5, mat3)
        from numpy import array
        highlighted = array((array(mat3) > 1), dtype=int)
        for nrank, grank, high in zip(test.rankArray, testCompare.rankArray,
                                      highlighted):
            if (high == 1):
                self.assertGreater(
                    nrank, grank,
                    "The more important node is not more important!")
Example No. 7
    def testNRankInputMatrixThree(self):

        mat1 = [
            [0, 0, 0, 0, 0, 0, 1.0 / 3, 0],
            [1.0 / 2, 0, 1.0 / 2, 1.0 / 3, 0, 0, 0, 0],
            [1.0 / 2, 0, 0, 0, 0, 0, 0, 0],
            [0, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 1.0 / 2, 1.0 / 3, 0, 0, 1.0 / 3, 0],
            [0, 0, 0, 1.0 / 3, 1.0 / 3, 0, 0, 1.0 / 2],
            [0, 0, 0, 0, 1.0 / 3, 0, 0, 1.0 / 2],
            [0, 0, 0, 0, 1.0 / 3, 1, 1.0 / 3, 0],
        ]
        mat2 = ["var1", "var2", "var3", "var4", "var5", "var6", "var7", "var8"]
        mat3 = [1, 2, 1, 1, 2, 1, 1, 1]  # var2 and var5 are twice as important as the other variables

        # test to see if nRanking defaults to gRanking if alpha = 0
        test = nRanking(mat1, mat2, 0, mat3)  # alpha = 0: intrinsic node importance is ignored
        testCompare = gRanking(mat1, mat2)

        for inclIntrinsic, exclIntrinsic in zip(test.rankArray, testCompare.rankArray):
            self.assertAlmostEqual(inclIntrinsic, exclIntrinsic, msg="Does not simplify to gRanking")

        # test to see if important node is more important using this algorithm
        test = nRanking(mat1, mat2, 0.5, mat3)
        from numpy import array

        highlighted = array((array(mat3) > 1), dtype=int)
        for nrank, grank, high in zip(test.rankArray, testCompare.rankArray, highlighted):
            if high == 1:
                self.assertGreater(nrank, grank, "The more important node is not more important!")
Example No. 8
    def testNRankTestPlantFeedReactorSeparatorRecycleOutput(self):

        mat1 = [
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0],
            [1.0 / 3, 0, 1.0 / 3, 0, 0, 0, 1.0 / 3, 0, 0, 0, 0, 0, 0, 1.0 / 3],
            [1.0 / 3, 0, 1.0 / 3, 0, 1, 0, 1.0 / 3, 0, 0, 0, 0, 0, 0, 1.0 / 3],
            [0, 1, 0, 1, 0, 0.5, 0, 0, 0, 0, 0, 0, 1, 0],
            [1.0 / 3, 0, 1.0 / 3, 0, 0, 0.5, 0, 0, 0, 0, 0, 0, 0, 1.0 / 3],
            [0, 0, 0, 0, 0, 0, 1.0 / 3, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0],
        ]
        mat2 = ["T1", "F1", "T2", "F2", "R1", "X1", "F3", "T3", "F4", "T4", "F6", "T6", "F5", "T5"]
        # all the temperature variables are 4 times more important than the others (for safety reasons)
        mat3 = [4, 1, 4, 1, 1, 1, 1, 4, 1, 4, 1, 4, 1, 4]

        # test to see if nRanking defaults to gRanking if alpha = 0
        test = nRanking(mat1, mat2, 0, mat3)  # alpha = 0: intrinsic node importance is ignored
        testCompare = gRanking(mat1, mat2)

        for inclIntrinsic, exclIntrinsic in zip(test.rankArray, testCompare.rankArray):
            self.assertAlmostEqual(inclIntrinsic, exclIntrinsic, msg="Does not simplify to gRanking")

        # test to see if important node is more important using this algorithm
        test = nRanking(mat1, mat2, 0.5, mat3)
        from numpy import array

        highlighted = array((array(mat3) > 1), dtype=int)
        for nrank, grank, high in zip(test.rankArray, testCompare.rankArray, highlighted):
            if high == 1:
                self.assertGreater(nrank, grank, "The more important node is not more important!")
Example No. 9
    def testNRankTestPlantFeedReactorSeparatorRecycleOutput(self):

        mat1 = [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0],
                [1.0 / 3, 0, 1.0 / 3, 0, 0, 0, 1.0 / 3, 0, 0, 0, 0, 0, 0, 1.0 / 3],
                [1.0 / 3, 0, 1.0 / 3, 0, 1, 0, 1.0 / 3, 0, 0, 0, 0, 0, 0, 1.0 / 3],
                [0, 1, 0, 1, 0, 0.5, 0, 0, 0, 0, 0, 0, 1, 0],
                [1.0 / 3, 0, 1.0 / 3, 0, 0, 0.5, 0, 0, 0, 0, 0, 0, 0, 1.0 / 3],
                [0, 0, 0, 0, 0, 0, 1.0 / 3, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0]]
        mat2 = [
            "T1", "F1", "T2", "F2", "R1", "X1", "F3", "T3", "F4", "T4", "F6",
            "T6", "F5", "T5"
        ]
        # all the temperature variables are 4 times more important than the others (for safety reasons)
        mat3 = [4, 1, 4, 1, 1, 1, 1, 4, 1, 4, 1, 4, 1, 4]

        #test to see if nRanking defaults to gRanking if alpha = 0
        test = nRanking(mat1, mat2, 0, mat3)  # alpha = 0: intrinsic node importance is ignored
        testCompare = gRanking(mat1, mat2)

        for inclIntrinsic, exclIntrinsic in zip(test.rankArray,
                                                testCompare.rankArray):
            self.assertAlmostEqual(inclIntrinsic,
                                   exclIntrinsic,
                                   msg="Does not simplify to gRanking")

        #test to see if important node is more important using this algorithm
        test = nRanking(mat1, mat2, 0.5, mat3)
        from numpy import array
        highlighted = array((array(mat3) > 1), dtype=int)
        for nrank, grank, high in zip(test.rankArray, testCompare.rankArray,
                                      highlighted):
            if (high == 1):
                self.assertGreater(
                    nrank, grank,
                    "The more important node is not more important!")
Example No. 10
    def testGRankInputMatrixThree(self):
        mat1 = [[0,0,0,0,0,0,1.0/3,0],[1.0/2,0,1.0/2,1.0/3,0,0,0,0],[1.0/2,0,0,0,0,0,0,0],[0,1,0,0,0,0,0,0],[0,0,1.0/2,1.0/3,0,0,1.0/3,0],[0,0,0,1.0/3,1.0/3,0,0,1.0/2],[0,0,0,0,1.0/3,0,0,1.0/2],[0,0,0,0,1.0/3,1,1.0/3,0]]
        mat2 = ['var1','var2','var3','var4','var5','var6','var7','var8']
        
        testThree = gRanking(mat1,mat2) #create
        
        from numpy import shape #check if gain matrix is square
        [row,col] = shape(testThree.gMatrix) 
        self.assertNotEqual(row, testThree.n+1, "The matrix is not square: rows")
        self.assertNotEqual(col, testThree.n+1, "The matrix is not square: columns")        
        
        acc = 5 #req accuracy
 
        for i in range(testThree.n): #check if stochastic
            self.assertAlmostEqual(sum(testThree.gMatrix[:,i]),1.0,acc)
            
        self.assertAlmostEqual(testThree.maxeig, 1.0, acc) #check max eig is 1
       
        """I'm not sure what the exact expected output is but it looks reasonable to me..."""
Example No. 11
    def __init__(self,
                 variables,
                 localdiff,
                 numberofinputs,
                 fgainmatrix,
                 fconnectionmatrix,
                 fvariablenames,
                 bgainmatrix,
                 bconnectionmatrix,
                 bvariablenames,
                 normalgains,
                 normalconnections,
                 controlvarsforRGA=None):
        """This constructor will create an RGABristol object so that you simply
        have to call the display method to see which pairings should be made.

        It will also create 6 different ranking systems. Note that variablenames
        is not the same as variables!! There is a formatting difference.

        ASSUME: the first rows are the inputs up to numberofinputs"""

        self.bristol = RGA(variables, localdiff, numberofinputs,
                           controlvarsforRGA)

        self.forwardgain = gRanking(self.normaliseMatrix(fgainmatrix),
                                    fvariablenames)
        self.gfgain = gRanking(self.normaliseMatrix(fconnectionmatrix),
                               fvariablenames)

        self.backwardgain = gRanking(self.normaliseMatrix(bgainmatrix),
                                     bvariablenames)
        self.gbgain = gRanking(self.normaliseMatrix(bconnectionmatrix),
                               bvariablenames)

        self.normalforwardgain = gRanking(self.normaliseMatrix(normalgains),
                                          variables)
        self.normalbackwardgain = gRanking(
            self.normaliseMatrix(transpose(normalgains)), variables)
        self.normalforwardgoogle = gRanking(
            self.normaliseMatrix(normalconnections), variables)

        self.listofinputs = variables[:numberofinputs]
        self.listofoutputs = variables[numberofinputs:]
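The RGA object built first in this constructor supplies the Bristol relative gain array used for the input-output pairings the docstring mentions. For reference, the textbook RGA of a square, invertible steady-state gain matrix is the element-wise product of the matrix with the transpose of its inverse; the sketch below states that general definition and is not necessarily how this RGA class derives its bristolmatrix.

import numpy as np

def relative_gain_array(K):
    # Textbook Bristol RGA: Lambda = K (element-wise *) (K^-1)^T.
    # K must be a square, invertible steady-state gain matrix.
    K = np.asarray(K, dtype=float)
    return K * np.linalg.inv(K).T

Each row and column of the resulting array sums to one, and pairings are normally chosen on elements close to 1.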
Example No. 12
    def testGRankInputMatrixTwo(self):
        mat1 = [[0,1,0,0,0],[1,0,0,0,0],[0,0,0,1,0.5],[0,0,1,0,0.5],[0,0,0,0,0]] #this test contains a disconnected graph (2 components)
        mat2 = ['var1','var2','var3','var4','var5']
        
        testTwo = gRanking(mat1,mat2) #create
        
        from numpy import shape #check if gain matrix is square
        [row,col] = shape(testTwo.gMatrix) 
        self.assertNotEqual(row, testTwo.n+1, "The matrix is not square: rows")
        self.assertNotEqual(col, testTwo.n+1, "The matrix is not square: columns")        
        
        acc = 5 #req accuracy
 
        for i in range(testTwo.n): #check if stochastic
            self.assertAlmostEqual(sum(testTwo.gMatrix[:,i]),1.0,acc)
            
        self.assertAlmostEqual(testTwo.maxeig, 1.0, acc) #check max eig is 1
       
        expectedValues = [0.2,0.2,0.285,0.285,0.03] #check if output is believable
        for calculated, expected in zip(testTwo.rankArray, expectedValues):
            self.assertAlmostEqual(calculated, expected, acc)
Example No. 13
    def testGRankInputMatrixOne(self):
        mat1 = [[0,0,1,0.5],[1.0/3,0,0,0],[1.0/3,1.0/2,0,1.0/2],[1.0/3,1.0/2,0,0]]
        mat2 = ['var1','var2','var3','var4']

        testOne = gRanking(mat1,mat2)
        
        from numpy import shape #check if gain matrix is square
        [row,col] = shape(testOne.gMatrix) 
        self.assertNotEqual(row, testOne.n+1, "The matrix is not square: rows")
        self.assertNotEqual(col, testOne.n+1, "The matrix is not square: columns")
        
        digits = 5 # number of digits of accuracy
        
        # check if all columns of the gain matrix sum to 1 (otherwise the matrix is not stochastic and the solution is not meaningful)
        for i in range(testOne.n):
            self.assertAlmostEqual(sum(testOne.gMatrix[:,i]),1.0,digits)
        
        self.assertAlmostEqual(testOne.maxeig, 1.0, digits)

        expectedvalues = [ 0.36815068,  0.14180936,  0.28796163,  0.20207834]
        
        for calculated, expected in zip(testOne.rankArray, expectedvalues):  # order matches the loop variable names
            self.assertAlmostEqual(calculated, expected, digits)
Example No. 14
    def __init__(self, variables, localdiff, numberofinputs, fgainmatrix, fconnectionmatrix, fvariablenames, bgainmatrix, bconnectionmatrix, bvariablenames, normalgains, normalconnections, controlvarsforRGA=None):
        """This constructor will create an RGABristol object so that you simply
        have to call the display method to see which pairings should be made.

        It will also create 6 different ranking systems. Note that variablenames
        is not the same as variables!! There is a formatting difference.

        ASSUME: the first rows are the inputs up to numberofinputs"""

        self.bristol = RGA(variables, localdiff, numberofinputs, controlvarsforRGA)

        self.forwardgain = gRanking(self.normaliseMatrix(fgainmatrix), fvariablenames)
        self.gfgain = gRanking(self.normaliseMatrix(fconnectionmatrix), fvariablenames)

        self.backwardgain = gRanking(self.normaliseMatrix(bgainmatrix), bvariablenames)
        self.gbgain = gRanking(self.normaliseMatrix(bconnectionmatrix), bvariablenames)

        self.normalforwardgain = gRanking(self.normaliseMatrix(normalgains), variables)
        self.normalbackwardgain = gRanking(self.normaliseMatrix(transpose(normalgains)), variables)
        self.normalforwardgoogle = gRanking(self.normaliseMatrix(normalconnections), variables)

        self.listofinputs = variables[:numberofinputs]
        self.listofoutputs = variables[numberofinputs:]
Example No. 15
@author: St Elmo Wilken
"""

from localGainCalculator import localgains
from gainRank import gRanking
from RGABristol import RGA
import numpy as np
from numpy import array, zeros

test1 = localgains("connectionsTE.csv", "scaledinputs005h5.txt", 13)
gainm = test1.normaliseGainMatrix(test1.linlocalgainmatrix)
varm = test1.variables
googlem = test1.normaliseGainMatrix(test1.connectionmatrix)

test2 = gRanking(gainm, varm)

test2.showConnectRank()
gainnc = array(test2.rankArray).reshape(-1,1)

test2 = gRanking(googlem, varm)
test2.showConnectRank()

googlenc = array(test2.rankArray).reshape(-1,1)

rationc = array(gainnc/googlenc).reshape(-1,1)

"""**************"""

test1 = localgains("connectionsTEcontrol.csv", "scaledcontrol.txt", 21)
Example No. 16
@author: St Elmo Wilken
"""

from localGainCalculator import localgains
from gainRank import gRanking
from RGABristol import RGA
import numpy as np

import matplotlib.pyplot as plt

test1 = localgains("testFourConnections.csv", "testFourIG.txt", 5)
gainm = test1.normaliseGainMatrix(test1.linlocalgainmatrix)
varm = test1.variables
googlem = test1.normaliseGainMatrix(test1.connectionmatrix)
test2 = gRanking(googlem, varm)

#print(test2.sortedRankingsKey)
#print(test2.sortedRankingsValue)
#test2.showConnectRank()

localdiffsm = test1.localdiffmatrix
test3 = RGA(varm, localdiffsm, 5, 3) #remember to change me for each case!!!
#print(test3.pairedvariables)
haha = test3.bristolmatrix
print(haha)
#print(test3.openloopmatrix)
np.savetxt("rgatest.txt",haha)

plt.matshow(haha)
plt.show()
Example No. 17
                       style='solid',
                       alpha=0.2)
nx.draw_networkx_nodes(G, pos=posdict, node_color='y', node_size=900)
plt.axis('off')
plt.ylabel("Separation point = 0.5")

plt.figure("RGA")
plt.imshow(bristol.bristolmatrix, interpolation='nearest',
           extent=[0, 1, 0, 1])  #need to fix this part!!! it looks ugly
plt.axis('off')
plt.colorbar()
"""************************************************************************************************************************************"""
"""Eigenvector Approach Time"""
"""Local Gain Ranking"""

forwardgain = gRanking(localdata.normaliseGainMatrix(localgainmatrix),
                       variablenames)
backwardgain = gRanking(
    localdata.normaliseGainMatrix(transpose(localgainmatrix)), variablenames)
gfgain = gRanking(localdata.normaliseGainMatrix(connectionmatrix),
                  variablenames)
gbgain = gRanking(localdata.normaliseGainMatrix(transpose(connectionmatrix)),
                  variablenames)

frankdict = forwardgain.rankDict
brankdict = backwardgain.rankDict

gfdict = gfgain.rankDict
gbdict = gbgain.rankDict

plt.figure("Weight")
H = nx.DiGraph()
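The H = nx.DiGraph() created for the "Weight" figure presumably gets drawn with the rank dictionaries collected just above. A hedged sketch of one way to finish that figure, assuming rankDict maps variable names to rank values and reusing the nx and plt imports of this script; the project's actual plotting code is not shown in this excerpt.

# Illustrative continuation: node sizes scaled by the forward-gain ranking.
H.add_nodes_from(frankdict)
sizes = [4000 * frankdict[name] for name in H.nodes()]
nx.draw_networkx(H, pos=nx.spring_layout(H), node_size=sizes, node_color='y')
plt.axis('off')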
Example No. 18
nx.draw_networkx_edges(G,pos=posdict,width=5.0,edge_color=edgecolorlist, style='solid',alpha=0.5)
nx.draw_networkx_nodes(G,pos=posdict, node_color='y',node_size=900)
plt.axis('off')
plt.ylabel("Separation point = 0.5")

plt.figure("RGA")
plt.imshow(bristol.bristolmatrix, interpolation='nearest',extent=[0,1,0,1]) #need to fix this part!!! it looks ugly
plt.axis('off')
plt.colorbar()

"""************************************************************************************************************************************"""

"""Eigenvector Approach Time"""
"""Local Gain Ranking"""

forwardgain = gRanking(localdata.normaliseGainMatrix(localgainmatrix), variablenames)
backwardgain = gRanking(localdata.normaliseGainMatrix(transpose(localgainmatrix)),variablenames)
gfgain = gRanking(localdata.normaliseGainMatrix(connectionmatrix), variablenames)
gbgain = gRanking(localdata.normaliseGainMatrix(transpose(connectionmatrix)),variablenames)

frankdict = forwardgain.rankDict
brankdict = backwardgain.rankDict

gfdict = gfgain.rankDict
gbdict = gbgain.rankDict

print(frankdict)
print(brankdict)
print(gfdict)
print(gbdict)
Example No. 19
@author: St Elmo Wilken
"""

from localGainCalculator import localgains
from gainRank import gRanking
from RGABristol import RGA
import numpy as np
from numpy import array, zeros

test1 = localgains("connectionsTE.csv", "scaledinputs005h5.txt", 13)
gainm = test1.normaliseGainMatrix(test1.linlocalgainmatrix)
varm = test1.variables
googlem = test1.normaliseGainMatrix(test1.connectionmatrix)

test2 = gRanking(gainm, varm)

test2.showConnectRank()
gainnc = array(test2.rankArray).reshape(-1, 1)

test2 = gRanking(googlem, varm)
test2.showConnectRank()

googlenc = array(test2.rankArray).reshape(-1, 1)

rationc = array(gainnc / googlenc).reshape(-1, 1)
"""**************"""

test1 = localgains("connectionsTEcontrol.csv", "scaledcontrol.txt", 21)

gainm = test1.normaliseGainMatrix(test1.linlocalgainmatrix)