Пример #1
0
def find_missing_points(Punkte1, FeldName, Punkte2):
    """ Function returning points that are in data set **Punkte1** but not in **Punkte2**.
    The attribute label used to compare the two data sets is given through **FeldName**.

    \n.. comments: 
    Input:
        Punkte1:        List of points (first data set)
        FeldName:       String naming the linking attribute between the two data
                        sets.  Currently "name" and "node_id" are implemented.
        Punkte2:        List of points (second data set)
    Return:
        FehlendePunkte: List of strings (attribute values of the missing points).
    Raises:
        NotImplementedError: for any other FeldName value.
    """

    # Initialization of variables
    FehlendePunkte = []
    if FeldName.lower() == "name":
        for punkt in Punkte1:
            # keep the point if its name does not occur anywhere in Punkte2
            pos = M_FindPos.find_pos_StringInList(punkt.name, Punkte2)
            if len(pos) == 0:
                FehlendePunkte.append(punkt.name)
    elif FeldName.lower() == "node_id":
        for punkt in Punkte1:
            pos = M_FindPos.find_pos_StringInList(punkt.node_id, Punkte2)
            if len(pos) == 0:
                FehlendePunkte.append(punkt.node_id)
    else:
        # BUG FIX: the original executed a bare `raise` outside any except
        # block, which fails with "RuntimeError: No active exception to
        # re-raise".  Raise an explicit, descriptive exception instead.
        raise NotImplementedError(
            sys.argv[0] + '.find_missing_points: code not written yet for FeldName=' + str(FeldName))

    return FehlendePunkte
Пример #2
0
def read_CSV(CSV_Path):
    """Description:
    ------------
        Reads data from folder CSV_Path into Grid
        Grid = Instance of Netz Class

    Input Parameter:
    ----------------
        CSV_Path        string containing path name of data location

    Return Parameters:
    ------------------
        Grid            instance of class K_Netze.Netz, populated with
                         data from CSV files"""

    FileList = [
        'BorderPoints', 'PipePoints', 'Compressors', 'Nodes', 'EntryPoints',
        'InterConnectionPoints', 'LNGs', 'Meta_BorderPoints',
        'Meta_Compressors', 'Meta_EntryPoints', 'Meta_InterConnectionPoints',
        'Meta_LNGs', 'Meta_PipePoints', 'Meta_Storages', 'Storages'
    ]

    print('')
    print(CC.Caption + 'Load CSV-Data into Grid' + CC.End)
    print('--------------------------------------')

    Grid = K_Netze.NetComp()
    Grid.Processes.append(K_Netze.Processes('M_CSV.read_CSV'))

    # BUG FIX: the original wrapped this loop in
    # `for filename in os.listdir(CSV_Path):` and then immediately overwrote
    # `filename` from FileList, so the whole FileList was re-imported once per
    # directory entry, duplicating every component's data
    # len(os.listdir(CSV_Path)) times.  One pass over FileList suffices.
    for key in FileList:
        # Meta files carry no 'Gas_' filename prefix
        if 'Meta_' in key:
            filename = key + '.csv'
        else:
            filename = 'Gas_' + key + '.csv'
        CSV_File = os.path.join(CSV_Path, filename)
        Z = CSV_2_list(CSV_File)
        if len(Z) > 0:
            for entry in Z:
                # capture positions of 'id'/'name' before removing them, so the
                # remaining entry dict becomes the component's param payload
                Keys = list(entry.keys())
                Vals = list(entry.values())
                posId = M_FindPos.find_pos_StringInList('id', Keys)
                posName = M_FindPos.find_pos_StringInList('name', Keys)
                del entry['id']
                del entry['name']
                Grid.__dict__[key].append(K_Netze.__dict__[key](
                    id=Vals[posId[0]], name=Vals[posName[0]], param=entry))
        else:
            # missing/empty file: component list is explicitly emptied
            Grid.__dict__[key] = []

    return Grid
Пример #3
0
def changeCountryCode(Netz, RelDirName='LKD_CountryCodeChanges.csv'):
    """Changes the country code of components based on an input CSV file.

    Input:
        Netz         Netz class instance, modified in place
        RelDirName   path to a ';'-separated CSV file with a header row and
                     columns [node_id, new country code]; if the file does
                     not exist, Netz is returned unchanged
    Return:
        Netz         the (possibly updated) instance
    """

    if os.path.exists(RelDirName):
        allPipeID = []
        allCC = []
        # BUG FIX: the original opened the file without ever closing it;
        # `with` guarantees the handle is released even on error.
        with open(RelDirName, 'r', encoding="utf-8", errors="ignore") as fid:
            # skip the header line
            fid.readline()
            csv_reader = csv.reader(fid, delimiter=";")
            for row in csv_reader:
                allPipeID.append(str(row[0]))
                allCC.append(row[1])

        # Walk every element of every component list and replace the country
        # code of nodes listed in the CSV
        for comp in Netz.CompLabels():
            for elem in Netz.__dict__[comp]:
                if isinstance(elem.node_id, list):
                    for jj, elemId in enumerate(elem.node_id):
                        pos = M_FindPos.find_pos_StringInList(str(elemId), allPipeID)
                        if len(pos) == 1:
                            if isinstance(elem.country_code, list):
                                elem.country_code[jj] = allCC[pos[0]]
                            else:
                                elem.country_code = allCC[pos[0]]
                else:
                    pos = M_FindPos.find_pos_StringInList(str(elem.node_id), allPipeID)
                    if len(pos) == 1:
                        elem.country_code = allCC[pos[0]]

    return Netz
Пример #4
0
def join_PipeLine_Meta(Elemente, Meta_Elemente, Meta_Namen, Meta_Typen,
                       Method_Name):
    """ Function to join elements (**Elemente**) with meta data of elements **Meta_Elemente**.

    Elements whose param['meta_id'] is not found among the meta data comp_ids
    are dropped from the returned list (see posKeep / select_byPos below).

    \n.. comments: 
    Input:
        Elemente:            Gas Netzwerk elements (topological information)
        Meta_Elemente:       Information from Meta data for PipeLines 
        Meta_Namen:          Variable names of Meta_Elemente
        Meta_Typen:          List of strings indicating the type of data
                             (not used inside this function)
        Method_Name:         List of strings; a non-empty entry means the
                             corresponding column is stored in the param dict,
                             an empty entry means it becomes an attribute
    Return:
        Elemente:            Gas Netzwerk elements linked to the Meta data.
    """

    # Initialization of variables
    Meta_comp_ids = M_Helfer.get_attribFromList(Meta_Elemente, 'comp_id')
    countEle = 0
    posKeep = []
    try:
        for ele in Elemente:
            # link key: element's meta_id must match a meta comp_id
            dieserWert = ele.param['meta_id']
            pos = M_FindPos.find_pos_StringInList(dieserWert, Meta_comp_ids)
            if len(pos) > 0:
                # remember this element's position so it survives the filter below
                posKeep.append(countEle)
                for idx, metName in enumerate(Meta_Namen):
                    if metName != 'comp_id' and metName != 'id':
                        if len(Method_Name[idx]) > 0:
                            # non-empty method name: store value in param dict
                            Elemente[countEle].param.update({
                                metName:
                                getattr(Meta_Elemente[pos[0]], metName)
                            })
                        else:
                            # empty method name: store value as attribute
                            setattr(Elemente[countEle], metName,
                                    getattr(Meta_Elemente[pos[0]], metName))

            countEle = countEle + 1

        # Keep only the elements that found a meta-data partner
        Temp = K_Netze.NetComp()
        Temp.Temp = Elemente
        Temp.select_byPos('Temp', posKeep)
        Elemente = Temp.Temp

    except:
        # NOTE(review): broad except, but the error is reported and re-raised
        print("ERROR: M_Verknuepfe.join_Component_Meta")
        raise

    return Elemente
Пример #5
0
def unique_String(Punkte):
    """ Function returning the unique strings from input **Punkte**, keeping
    first-occurrence order.

    \n.. comments:    
    Input:
        Punkte          List of strings
    Output:
        Punkte_Return   List of unique strings, in order of first appearance.
    """

    # dict.fromkeys de-duplicates in O(n) while preserving insertion order
    # (guaranteed since Python 3.7), replacing the original O(n^2) scan via
    # M_FindPos.find_pos_StringInList.
    # NOTE(review): assumes find_pos_StringInList performed exact matching --
    # consistent with its other uses in this file.
    return list(dict.fromkeys(Punkte))
Пример #6
0
def getMatch_LatLong_Threshold(comp_0, comp_1, methodVal=50000):
    """Gets the separation between two points and checks whether the distance
    is smaller than **methodVal** (same units as returned by
    M_FindPos.find_pos_closestLatLongInList; default 50000).

    Returns 100 if the separation is within the threshold, 0 otherwise
    (including empty components, missing coordinates, or NaN separation).
    """

    # Guard clauses: an empty component or missing longitude can never match.
    if comp_0 == '' or comp_1 == '':
        return 0
    if comp_0.long is None or comp_1.long is None:
        return 0

    # Build a one-point "polyline" for comp_0 and find the closest separation
    # to comp_1's coordinates.
    latlong_Netz_0 = K_Comp.PolyLine(lat=[comp_0.lat], long=[comp_0.long])
    thisLatLong = K_Comp.PolyLine(lat=comp_1.lat, long=comp_1.long)
    [pos, minVal] = M_FindPos.find_pos_closestLatLongInList(thisLatLong, latlong_Netz_0)

    # NaN separation counts as "no match"
    if math.isnan(minVal):
        return 0
    return 100 if minVal <= methodVal else 0
Пример #7
0
def Test_Histogram(Graph_MD):
    """ Returns a histogram of node degrees for the multi-directional network
    graph **Graph_MD**.

    \n.. comments:
    Input:
        Graph_MD        Instance of NX multi-directional graph
    Return:
        HistSegmKnoten  Vector; entry k is the number of nodes with degree k
    """

    Edges = NX.edges(Graph_MD)
    KnotenNamen = NX.nodes(Graph_MD)

    KnotenNamenListe = M_Helfer.unique_String(KnotenNamen)
    NumKnotenListe = len(KnotenNamenListe)

    # Count, per unique node name, how many edge endpoints reference it
    KnotenLeitung = arr.array('i', [0] * NumKnotenListe)
    for idx, name in enumerate(KnotenNamenListe):
        treffer = 0
        for edge in Edges:
            if edge[0] == name:
                treffer += 1
            if edge[1] == name:
                treffer += 1
        KnotenLeitung[idx] = treffer

    # Histogram over all possible degree values 0..max
    MaxKnotenLeitung = max(KnotenLeitung)
    HistSegmKnoten = M_MatLab.zeros('i', MaxKnotenLeitung + 1)
    for grad in range(MaxKnotenLeitung + 1):
        HistSegmKnoten[grad] = len(
            M_FindPos.find_pos_ValInVector(grad, KnotenLeitung, '=='))

    return HistSegmKnoten
Пример #8
0
def sort_Vector(vecIn):
    """ Sort a list of numeric values in ascending order, dropping NaN entries.

    \n.. comments: 
    Input:
        vecIn:       list of floats/ints
    Output:
        RetList:     list of sorted values (NaNs removed; int inputs stay int)
        PosList:     positions of each sorted value in the input list;
                     duplicate values are assigned their input positions in
                     order of occurrence
    """

    # Pair each non-NaN value with its original index and sort the pairs.
    # This is O(n log n) instead of the original O(n^2) position search, and
    # because ties fall back to the index, duplicates keep the original
    # "first occurrence first" ordering.  The input is not modified.
    pairs = sorted(
        (float(val), idx) for idx, val in enumerate(vecIn) if not math.isnan(val))

    RetList = [val for val, _ in pairs]
    PosList = [idx for _, idx in pairs]

    # Restore int type when the input was int-typed.
    # BUG FIX: the original printed a spurious
    # "ERROR: M_MatLab.sort_Vector: code not written yet" for float input,
    # even though the float path was fully handled.
    if vecIn and isinstance(vecIn[0], int):
        RetList = [int(val) for val in RetList]

    return RetList, PosList
Пример #9
0
def get_Degree(Graph_MD):
    """ Return node degrees as an int array for the multi-directional network
    graph **Graph_MD**.

    \n.. comments:
    Input:
        Graph_MD     Instance of NX multi-directional graph
    Return:
        ReturnVal:   array of ints, one degree per node
    """

    AllEdges = NX.edges(Graph_MD)
    Punkte = NX.nodes(Graph_MD)

    # Degree of a node = number of edge tuples it appears in
    return arr.array('i', (
        len(M_FindPos.find_pos_StringInTuple(Punkt, AllEdges))
        for Punkt in Punkte))
Пример #10
0
def leseSQL_Punkte(InfoSQL, TabellenName):
    """
    Reads a point (Punkte) table from the SQL data base.

    Input:
        InfoSQL         dict structure with SQL data base access data
        TabellenName    string key into InfoSQL['IO'] giving the table name
    Return:
        Punkte          list of points of class K_Netze.Nodes; empty if the
                        table does not exist in the data base
    """
    Punkte = []
    # Check that the table exists in the data base
    AlleTabellenNamen = getAllTableNames(InfoSQL)
    Name = InfoSQL['IO'][TabellenName]
    if len(M_FindPos.find_pos_StringInList(Name, AlleTabellenNamen)):
        # BUG FIX: the connection key was misspelled "DataBAseName" here;
        # every other SQL helper in this module reads "DataBaseName".
        con = connect(dbname=InfoSQL['IO']["DataBaseName"],
                      user=InfoSQL['IO']["User"],
                      host=InfoSQL['IO']["Host"],
                      port=int(InfoSQL['IO']["Port"]),
                      password=InfoSQL['IO']["PassWord"])
        cur = con.cursor()
        cur.execute("SELECT * FROM " + InfoSQL['IO'][TabellenName])
        TabPunkte = cur.fetchall()
        cur.close()
        con.close()

        # Column layout (from the original index usage):
        # 0 = id, 1 = name, 4 = country code, 5 = lat, 6 = long
        for tab in TabPunkte:
            Punkte.append(K_Netze.Nodes(id=tab[0], name=tab[1], lat=tab[5],
                                        long=tab[6], country_code=tab[4],
                                        comment=None, param={}))

    return Punkte
	
	
Пример #11
0
def leseSQL_Meta(InfoSQL, TabellenName):
    """
    Reads meta data tables from the SQL data base.

    Input:
        InfoSQL         dict structure with SQL data base access data
        TabellenName    string key into InfoSQL['IO'] giving the table name
    Return:
        [MetaData, MetaType, ColumnNames] where
        MetaData        dict mapping column name -> list of column values
        MetaType        list of data types for the meta data columns
        ColumnNames     list of column names of the meta data
    """

    MetaData = {}
    MetaType = []
    ColumnNames = []

    # Check that the table exists in the data base
    AlleTabellenNamen = getAllTableNames(InfoSQL)
    Name = InfoSQL['IO'][TabellenName]
    if len(M_FindPos.find_pos_StringInList(Name, AlleTabellenNamen)) == 0:
        # Table missing: return empty results with the same three-element
        # shape.  (The original returned a bare list here, which broke any
        # caller unpacking three values.)
        return [MetaData, MetaType, ColumnNames]

    # Fetch the full table contents
    con = connect(dbname=InfoSQL['IO']["DataBaseName"], user=InfoSQL['IO']["User"], host=InfoSQL['IO']["Host"], port=int(InfoSQL['IO']["Port"]), password=InfoSQL['IO']["PassWord"])
    cur = con.cursor()
    cur.execute("SELECT * FROM " + InfoSQL['IO'][TabellenName])
    MetaPunkte = cur.fetchall()
    cur.close()
    con.close()

    # Fetch column names and raw data types.  (The original built and
    # discarded two other query strings here; only this one was executed.)
    con = connect(dbname=InfoSQL['IO']["DataBaseName"], user=InfoSQL['IO']["User"], host=InfoSQL['IO']["Host"], port=int(InfoSQL['IO']["Port"]), password=InfoSQL['IO']["PassWord"])
    cur = con.cursor()
    cur.execute("select column_name, data_type from information_schema.columns where table_name = '" + InfoSQL['IO'][TabellenName] + "'")
    MetaTypeRaw = []
    for name in cur.fetchall():
        ColumnNames.append(name[0])
        MetaTypeRaw.append(name[1])
    cur.close()
    con.close()

    # Build dict of column name -> column values.
    # BUG FIX: the original appended every column's values into ONE shared
    # list without resetting it, so each dict entry aliased the same
    # ever-growing list instead of holding its own column.
    for count, dicName in enumerate(ColumnNames):
        MetaData[dicName] = [row[count] for row in MetaPunkte]

    # typemap is a module-level lookup translating SQL type names
    MetaType = [typemap[typename] for typename in MetaTypeRaw]

    return [MetaData, MetaType, ColumnNames]
Пример #12
0
def find_Match_Attrib(Netz_1,
                      CompName_1,
                      AttribName_1,
                      Netz_2,
                      CompName_2,
                      AttribName_2,
                      SearchOption='single',
                      CountryOn=False,
                      AddInWord=0,
                      String2Lower=False):
    """ Greedily matches components of two Netz instances by fuzzy-comparing
    one attribute (e.g. names), optionally masked so that only components in
    the same country can match.  The pair with the highest remaining score is
    selected repeatedly until one side is exhausted.

    \n.. comments: 
    Input:
        Netz_1:          Netz class instance
        CompName_1:      string, component name of Netz_1 to be used
        AttribName_1:    string, attribute of Netz_1 components to compare
        Netz_2:          Netz class instance
        CompName_2:      string, component name of Netz_2 to be used
        AttribName_2:    string, attribute of Netz_2 components to compare
        SearchOption     string; 'single' uses each Netz_2 entry at most once,
                         'multi' allows re-use
        CountryOn        [False], if True only locations in the same country
                         (or with unknown country) can match
        AddInWord        [0], score bonus if one name is contained in the other
        String2Lower     [False], lower-casing not implemented (prints a note)
    Return:  
        pos_1:           list of ints, matched positions in Netz_1 component
        pos_2:           list of ints, matched positions in Netz_2 component
        GoodnessVal:     list of floats, fuzzy score of each matched pair
    """

    # Selecting the data of the chosen component from Netz_1
    Comp_1 = Netz_1.__dict__[CompName_1]

    # Selecting the data of the chosen component from Netz_2
    Comp_2 = Netz_2.__dict__[CompName_2]

    # Initialization of variables
    pos_1 = []
    pos_2 = []
    GoodnessVal = []
    # positions not yet matched, per side
    posLeft_1 = [s for s in range(len(Comp_1))]
    posLeft_2 = [s for s in range(len(Comp_2))]

    Run_1 = True
    Run_2 = True

    # Country mask: 1 where a pair is allowed to match, 0 where forbidden
    if CountryOn:
        Country_1 = M_Helfer.get_NotPos(Comp_1, pos_1, 'country_code')
        Country_2 = M_Helfer.get_NotPos(Comp_2, pos_2, 'country_code')
        Country_Matrix_Orig = M_Helfer.get_NameMatrix_Fuzzy(
            Country_1, Country_2)
        for xx in range(len(Country_Matrix_Orig)):
            for yy in range(len(Country_Matrix_Orig[0])):
                if Country_Matrix_Orig[xx][yy] >= 100:
                    # same country: allowed
                    Country_Matrix_Orig[xx][yy] = 1
                elif Country_1[xx] == None or Country_2[yy] == None:
                    # unknown country on either side: allowed
                    Country_Matrix_Orig[xx][yy] = 1
                else:
                    Country_Matrix_Orig[xx][yy] = 0
    else:
        # CountryOn disabled: build the same-shaped matrix but allow all pairs
        Country_1 = M_Helfer.get_NotPos(Comp_1, pos_1, 'country_code')
        Country_2 = M_Helfer.get_NotPos(Comp_2, pos_2, 'country_code')
        Country_Matrix_Orig = M_Helfer.get_NameMatrix_Fuzzy(
            Country_1, Country_2)
        for xx in range(len(Country_Matrix_Orig)):
            for yy in range(len(Country_Matrix_Orig[0])):
                Country_Matrix_Orig[xx][yy] = 1

    if String2Lower:
        print('change code')

    # Running through data set for first time, to catch all locations, where name is totally same
    Name_1 = M_Helfer.get_NotPos(Comp_1, pos_1, AttribName_1)
    Name_2 = M_Helfer.get_NotPos(Comp_2, pos_2, AttribName_2)

    # Getting matching location names
    # NOTE(review): New_pos_1/New_pos_2 are computed but never used below
    [New_pos_1, New_pos_2] = M_Helfer.get_NameMatch(Name_1, Name_2)

    # Generating un-shrunk data for later
    Orig_Name_1 = M_Helfer.get_NotPos(Comp_1, [], AttribName_1)
    Orig_Name_2 = M_Helfer.get_NotPos(Comp_2, [], AttribName_2)

    # Fuzzy name-similarity matrix, masked by the country matrix
    Name_Matrix_Orig = M_Helfer.get_NameMatrix_Fuzzy(Orig_Name_1, Orig_Name_2,
                                                     AddInWord)
    Name_Matrix_Orig = M_MatLab.multi_2Matrix(Name_Matrix_Orig,
                                              Country_Matrix_Orig)

    # Combining matrixes
    GoodnessMatrix_Orig = Name_Matrix_Orig
    # Greedy selection: repeatedly take the best-scoring remaining pair
    while Run_2 and Run_1:
        # restrict the score matrix to the still-unmatched rows/columns
        GoodnessMatrix_Shrunk = M_MatLab.shrink_Matrix(GoodnessMatrix_Orig,
                                                       posLeft_1, posLeft_2)

        # determine positions of the maximum in the shrunk matrix
        [pos_Shrunk_1, pos_Shrunk_2
         ] = M_FindPos.find_pos_ConditionInMatrix(GoodnessMatrix_Shrunk, 'max')

        GoodnessVal.append(GoodnessMatrix_Shrunk[pos_Shrunk_1][pos_Shrunk_2])
        # map shrunk positions back to positions in the original data sets
        pos_Orig_1 = posLeft_1[pos_Shrunk_1]
        pos_Orig_2 = posLeft_2[pos_Shrunk_2]

        pos_1.append(pos_Orig_1)
        posLeft_1.remove(pos_Orig_1)
        pos_2.append(pos_Orig_2)
        # 'single': consume the Netz_2 entry; 'multi': leave it available
        if 'single' in SearchOption:
            posLeft_2.remove(pos_Orig_2)

        # Stop once either side is fully matched
        if len(pos_1) == len(Comp_1):
            Run_1 = False
        if len(pos_2) == len(Comp_2):
            Run_2 = False

    return [pos_1, pos_2, GoodnessVal]
Пример #13
0
def getMatch_LatLong(comp_0, comp_1, method='inv'):
    """Gets the separation between two points in km and maps it to a match
    score.  **method** selects the measure returned (distance in [km]):
    "inv"       (100 / distance, capped at 100), 
    "power2inv" (100 / (distance^2)), 
    "loginv"    (100 / log(distance), with base e),
    "log10inv"  (100 / log10(distance), with base 10),
    "distance"  (distance)

    Returns 0 for empty components, missing/string coordinates, unknown
    methods, or NaN scores.
    """

    # Guard clauses: empty components or missing longitude score 0.
    if comp_0 == '' or comp_1 == '':
        return 0
    if comp_0.long is None or comp_1.long is None:
        return 0

    # String coordinates are a caller error; report it and score 0
    # (the original fell through to the same result implicitly).
    if isinstance(comp_0.lat, str):
        print('ERROR: M_Matching.getComp_LatLong: input type is string.  Float expected. comp_0')
        return 0
    if isinstance(comp_1.lat, str):
        print('ERROR: M_Matching.getComp_LatLong: input type is string.  Float expected. comp_1')
        return 0

    # One-point "polyline" for comp_0; closest separation to comp_1,
    # converted from m to km
    latlong_Netz_0 = K_Comp.PolyLine(lat=[comp_0.lat], long=[comp_0.long])
    thisLatLong = K_Comp.PolyLine(lat=comp_1.lat, long=comp_1.long)
    [pos, minVal] = M_FindPos.find_pos_closestLatLongInList(thisLatLong, latlong_Netz_0)
    minVal = minVal / 1000

    RetVal = 0
    if minVal == 0.0:
        # identical locations always score the maximum
        RetVal = 100
    elif method == 'inv':
        RetVal = min([100 / minVal, 100])
    elif method == 'power2inv':
        RetVal = 100 / minVal / minVal
    elif method == 'log10inv':
        RetVal = 100 / math.log(minVal, 10)
    elif method == 'loginv':
        RetVal = 100 / math.log(minVal)
    elif method == 'distance':
        RetVal = minVal
    else:
        print('ERROR: M_Matching: get_Comp_LatLong: method not defined.')

    # NaN scores are treated as 0
    if math.isnan(RetVal):
        RetVal = 0

    return RetVal
Пример #14
0
def leseSQL_Leitungen(InfoSQL):
    """
    Reads the pipeline (Leitungen) table from the SQL data base and groups
    consecutive rows sharing the same pipeline name into one Leitung object.

    Input:
        InfoSQL         dict structure with SQL data base access data
    Return:
        Leitung         list of Leitung objects of class K_Netze.Leitung()
    """
    
    Leitungen   = []
    
    con         = connect(dbname = InfoSQL['IO']["DataBaseName"],  user = InfoSQL['IO']["User"],  host = InfoSQL['IO']["Host"],  port = int(InfoSQL['IO']["Port"]),  password = InfoSQL['IO']["PassWord"])
    cur         = con.cursor()
    CurString   = "SELECT * FROM " + InfoSQL['IO']["TabName_Leitungen"]
    cur.execute(CurString)
    
    Leitungen = cur.fetchall()

    cur.close()
    con.close()
    
    # Initialization of variables
    countLeitung    = 0   # index of the Leitung object being built
    countLine       = 0   # index of the current row in the fetched table
    MaxNum          = len(Leitungen)
    Leitung         = []
    
    # Row layout (from index usage): [2] = pipeline name, [3] = node name,
    # [6] = description
    AlleAlleName = []
    for ii in range(MaxNum):
         AlleAlleName.append(Leitungen[ii][2])
    
    # Group rows by pipeline name; all rows of one pipeline are assumed to be
    # consecutive, so after consuming a group we jump ahead by 1 + len(pos).
    while countLine < MaxNum:
        Leitung.append(K_Netze.Leitung())
        dieserLeitungsName                  = Leitungen[countLine][2]           # pipeline name
        dieserPunktName                     = Leitungen[countLine][3]           # node name
        Leitung[countLeitung].name          = dieserLeitungsName
        # NOTE(review): node_id is assigned the raw row value here and
        # .append() is called on it below -- this only works if the value is a
        # list (or Leitung.node_id wraps it); confirm against the DB schema.
        Leitung[countLeitung].node_id       = dieserPunktName                   # node names
        Leitung[countLeitung].param['description']   = Leitungen[countLine][6]
        
        #Leitung[countLeitung].__dict__
        # dir(Leitung[countLeitung])
        
        # Positions of further rows carrying the same pipeline name
        allLeitungsNames                = AlleAlleName[countLine+1:]
        pos = M_FindPos.find_pos_StringInList(dieserLeitungsName, allLeitungsNames)
        if len(pos) == 1:
            # exactly one more row: it holds the pipeline's end node
            dieserPunktName                 = Leitungen[countLine + 1 + pos[0]][3]
            Leitung[countLeitung].node_id.append(dieserPunktName)
        elif len(pos) > 1:
            # several more rows: append the intermediate nodes first, the last
            # row's node goes in last; pos is padded so the countLine jump
            # below still skips the correct number of rows
            dieserPunktName                 = Leitungen[countLine + 1+ pos[len(pos) - 1]][3]
            pos                             = pos[0:len(pos)-1]
            for p in pos:
                Leitung[countLeitung].node_id.append(Leitungen[countLine + 1 + p][3])
            Leitung[countLeitung].node_id.append(dieserPunktName)
            pos.append(0)
        else:
            # a pipeline with a single row cannot form a segment
            print('Leitung defekt')
        
        
        countLeitung    = countLeitung  + 1
        # skip this row plus all rows consumed for this pipeline
        countLine       = countLine     + 1 + len(pos)

    return Leitung
Пример #15
0
def getMatch_LatLong_CountryCode(comp_0, comp_1, method='inv', thresholdVal=None):
    """Gets the separation between two points and maps it to a match score,
    provided both components are in the same (or unknown) country.
    **method** selects the measure returned:
    "inv"               (100 / distance, capped at 100),
    "power2inv"         (100 / (distance^2)),
    "loginv"            (100 / log(distance), with base e),
    "log10inv"          (100 / log10(distance), with base 10),
    "distance"          (distance),
    "distanceThreshold" (100 if distance <= thresholdVal, else 0),
    "exp"               (100 * exp(-distance*1000/thresholdVal))

    Returns -100000 when both components carry different, known country codes.
    """

    RetVal = 0

    # Guard clauses: empty components or missing longitude score 0.
    if comp_0 == '' or comp_1 == '':
        return 0
    if comp_0.long is None or comp_1.long is None:
        return 0

    # String coordinates are a caller error; report it and score 0.
    # BUG FIX: the original comp_0 branch contained a bare no-op `RetVal`
    # expression where the assignment was intended.
    if isinstance(comp_0.lat, str):
        print('ERROR: M_Matching.getComp_LatLong: input type is string.  Float expected. comp_0')
        return 0
    if isinstance(comp_1.lat, str):
        print('ERROR: M_Matching.getComp_LatLong: input type is string.  Float expected. comp_1')
        return 0

    cc_Netz_0 = comp_0.country_code
    cc_Netz_1 = comp_1.country_code
    if cc_Netz_0 != cc_Netz_1 and cc_Netz_0 is not None and cc_Netz_1 is not None:
        # different, known country codes: strong mismatch penalty
        return -100000

    # One-point "polyline" for comp_0; closest separation to comp_1.
    # NOTE(review): unlike getMatch_LatLong, the m-to-km division is
    # deliberately commented out in the original -- distances here stay in the
    # raw units of find_pos_closestLatLongInList.
    latlong_Netz_0 = K_Comp.PolyLine(lat=[comp_0.lat], long=[comp_0.long])
    thisLatLong = K_Comp.PolyLine(lat=comp_1.lat, long=comp_1.long)
    [pos, minVal] = M_FindPos.find_pos_closestLatLongInList(thisLatLong, latlong_Netz_0)

    if minVal == 0.0:
        # identical locations always score the maximum
        RetVal = 100
    elif method == 'inv':
        RetVal = min([100 / minVal, 100])
    elif method == 'power2inv':
        RetVal = 100 / minVal / minVal
    elif method == 'log10inv':
        RetVal = 100 / math.log(minVal, 10)
    elif method == 'loginv':
        RetVal = 100 / math.log(minVal)
    elif method == 'distance':
        RetVal = minVal
    elif method == 'distanceThreshold':
        # assumes thresholdVal is not None for this method -- TODO confirm
        if minVal <= thresholdVal:
            RetVal = 100
    elif method == 'exp':
        # assumes thresholdVal is not None for this method -- TODO confirm
        RetVal = 100 * math.exp(-minVal * 1000 / thresholdVal)
    else:
        print('ERROR: M_Matching: get_Comp_LatLong: method not defined.')

    # NaN scores are treated as 0
    if math.isnan(RetVal):
        RetVal = 0

    return RetVal

	
	
	
#def replacePipeSegments(Netz_Main, Netz_Fine, nodeDistance = 10000, lengthDistance = 0.2):
#    """This function does not do a thing
#    """	
#   # Determine which nodes are the same in both data sets
#    [pos_match_Netz_0, pos_add_Netz_0, pos_match_Netz_1, pos_add_Netz_1] = match(
#        Netz_Main, Netz_Fine, compName = 'Nodes', threshold = 45, multiSelect = False,
#        numFuncs = 1,
#        funcs = (
#        lambda comp_0, comp_1: getMatch_LatLong_CountryCode(comp_0, comp_1, method = 'inv')
#        ))
#    
#    # Convert Netz_Fine into NetWorkx
#    InfoDict = {'Gewichtung': 'Gleich', 'Weight': 'Gleich'}
#    [Graph_Fine, MDGraph_Fine]    = M_Graph.build_nx(InfoDict, Netz_Fine)
#    [Graph_Main, MDGraph_Main]    = M_Graph.build_nx(InfoDict, Netz_Main)
#    
#    for pipe1 in Netz_Main.PipeSegments:
#        # Determine length of network 1
#        pair        = [pipe1.node_id[0], pipe1.node_id[1]]
#        length_Main = M_Graph.get_shortest_paths_distances(Graph_Main, pair, edge_weight_name = 'length')
#        
#        # Determine length of network 2
#        #pos = M_FindPos.find_pos_ValInVector(Val, Vector, Type)
#        length_Fine = M_Graph.get_shortest_paths_distances(Graph_Fine, pair, edge_weight_name = 'length')
#    
#    print('M_Matching.replacePipeSegments: this function need checking, currently does nothing')
#    
#    return Netz_Main
	
Пример #16
0
def fixPipeSegmentsNode(PipeSegments, Nodes):
    """ Fixing wrong Start_point and End_Point id in respect of lat long

    \n.. comments: 
    Input:
        PipeSegments:   List of PipeSegments
        Nodes:          List of Nodes
    Return:
        PipeSegments
    """

    node_id = []
    node_lat = []
    node_long = []
    count = 0

    # Collect node ids with rounded coordinates
    # (roundNum is a module-level setting -- presumably the number of decimal
    # places used for coordinate comparison; TODO confirm)
    for nod in Nodes:
        node_id.append(nod.id)
        node_lat.append(round(nod.lat, roundNum))
        node_long.append(round(nod.long, roundNum))

    # Report node ids that do not occur exactly once
    for id in node_id:
        pos = M_FindPos.find_pos_ValInVector(id, node_id, '==')
        if len(pos) != 1:
            print('node ' + str(id) + ' funny.  Found ' + str(len(pos)))

    # For each PipeSegment compare its endpoint coordinates with the
    # coordinates of its start/end nodes, and swap the endpoints when they are
    # stored in reverse order
    for pipe in PipeSegments:
        S_pipe_node_id = copy.deepcopy(pipe.node_id[0])
        S_lat = round(pipe.lat[0], roundNum)
        S_long = round(pipe.long[0], roundNum)

        E_pipe_node_id = copy.deepcopy(pipe.node_id[1])
        E_lat = round(pipe.lat[-1], roundNum)
        E_long = round(pipe.long[-1], roundNum)

        S_pos = M_FindPos.find_pos_ValInVector(S_pipe_node_id, node_id, '==')
        E_pos = M_FindPos.find_pos_ValInVector(E_pipe_node_id, node_id, '==')

        if node_lat[S_pos[0]] != S_lat or node_long[S_pos[0]] != S_long:
            if len(S_pos) != 1:
                print('Warning: Start Node Multiple times ' + str(S_pipe_node_id))
            elif len(E_pos) != 1:
                # BUG FIX: the original re-tested len(S_pos) here, which made
                # this branch unreachable, so duplicated END nodes were never
                # reported (and E_pos[0] below could fail on an empty E_pos).
                print('Warning: End Node Multiple times ' + str(E_pipe_node_id))
            elif node_lat[S_pos[0]] == E_lat and node_long[S_pos[0]] == E_long and node_lat[E_pos[0]] == S_lat and node_long[E_pos[0]] == S_long:
                # coordinates are exactly swapped: rotate the pipe's endpoints
                pipe.node_id = [E_pipe_node_id, S_pipe_node_id]
                count = count + 1
            else:
                print('Warning: still wrong start ' + str(S_pipe_node_id))

    return PipeSegments
Пример #17
0
def read(RelDirName='Eingabe/CSV/', NumDataSets=1e+100, skiprows=None):
    """Description:
    ------------
        Reads Data from folder RelDirName into Grid
        Grid = Instance of Netz Class

    Input Parameter:
    ----------------
        RelDirName    string containing path name of data location [default: 'Eingabe/CSV/']
        NumDataSets   Number of elements to be read for each component [default: 1e+100]
        skiprows      list of row numbers to skip [default: None, meaning skip none]
    Return Parameters:
    ------------------
        Grid            instance of class K_Netze.Netz, populated with
                         data from CSV files  """
    # Avoid the mutable-default-argument pitfall: create the list per call.
    if skiprows is None:
        skiprows = []

    # Dir name stuff
    DirName = Path.cwd() / RelDirName

    Grid = K_Netze.NetComp()
    FileList = K_Netze.NetComp().CompLabels()
    for key in FileList:
        count = 0
        filename = 'Gas_' + key + '.csv'
        CSV_File = str(DirName / filename)

        # Z is an empty list if the file does not exist
        Z = CSV_2_list(CSV_File, skiprows=skiprows)
        if len(Z) > 0:
            for entry in Z:
                Keys = list(entry.keys())
                Vals = list(entry.values())
                # Normalize raw CSV cells: the literal string 'None' and NaN
                # floats become None; anything else is coerced to float when
                # possible, otherwise kept as the original string.
                for ii in range(len(Vals)):
                    if Vals[ii] == 'None':
                        Vals[ii] = None
                    elif type(Vals[ii]) is float:
                        if math.isnan(Vals[ii]):
                            Vals[ii] = None
                    else:
                        try:
                            Vals[ii] = float(Vals[ii])
                        except (TypeError, ValueError):
                            # non-numeric cell: keep the raw value
                            pass

                # Column positions of the fixed component attributes
                pos_Id = M_FindPos.find_pos_StringInList('id', Keys)
                pos_Name = M_FindPos.find_pos_StringInList('name', Keys)
                pos_SId = M_FindPos.find_pos_StringInList('source_id', Keys)
                pos_Node = M_FindPos.find_pos_StringInList('node_id', Keys)
                pos_CC = M_FindPos.find_pos_StringInList('country_code', Keys)
                pos_lat = M_FindPos.find_pos_StringInList('lat', Keys)
                pos_long = M_FindPos.find_pos_StringInList('long', Keys)
                pos_comm = M_FindPos.find_pos_StringInList('comment', Keys)
                pos_para = M_FindPos.find_pos_StringInList('param', Keys)
                pos_meth = M_FindPos.find_pos_StringInList('method', Keys)
                pos_unce = M_FindPos.find_pos_StringInList('uncertainty', Keys)
                pos_tags = M_FindPos.find_pos_StringInList('tags', Keys)

                # Remove the fixed attributes from the entry dict (the
                # remainder of the dict is not used further here).
                del entry['id']
                del entry['name']
                del entry['source_id']
                del entry['node_id']
                del entry['country_code']

                del entry['lat']
                del entry['long']
                del entry['comment']
                del entry['param']
                del entry['method']
                del entry['uncertainty']
                del entry['tags']

                # 'comp_id' instead of 'id' to avoid shadowing the builtin
                comp_id = Vals[pos_Id[0]]
                name = Vals[pos_Name[0]]
                source_id = makeList(Vals[pos_SId[0]])
                node_id = makeList(Vals[pos_Node[0]])
                country_code = makeList(Vals[pos_CC[0]])

                # lat/long may be stored as a Python-literal string (e.g. a
                # list of coordinates).
                # SECURITY NOTE: eval() executes whatever is in the CSV —
                # only process data files from a trusted source.
                lat = Vals[pos_lat[0]]
                if isinstance(lat, str):
                    lat = eval(lat)

                long = Vals[pos_long[0]]
                if isinstance(long, str):
                    long = eval(long)

                comment = Vals[pos_comm[0]]
                # Dict-valued cells may contain bare `nan` tokens; patch them
                # to float('nan') so eval() succeeds.
                param = eval(Vals[pos_para[0]].replace(': nan,',
                                                       ': float(\'nan\'),'))
                method = eval(Vals[pos_meth[0]].replace(
                    ': nan,', ': float(\'nan\'),'))
                uncertainty = eval(Vals[pos_unce[0]].replace(
                    ': nan,', ': float(\'nan\'),'))
                tags = eval(Vals[pos_tags[0]].replace(': nan,',
                                                      ': float(\'nan\'),'))

                # Instantiate the component class named `key` and append it
                # to the matching component list of the grid.
                Grid.__dict__[key].append(K_Component.__dict__[key](
                    id=comp_id,
                    name=name,
                    source_id=source_id,
                    node_id=node_id,
                    country_code=country_code,
                    param=param,
                    lat=lat,
                    long=long,
                    method=method,
                    uncertainty=uncertainty,
                    tags=tags,
                    comment=comment))
                count = count + 1
                if count >= NumDataSets:
                    break
        else:
            Grid.__dict__[key] = []

    return Grid
Пример #18
0
def _make_changed_node(NodeNew, PipeID, cc, lat, long):
    """Build the replacement Nodes element for a re-routed pipe end.

    Shared by both branches of changePipeSegments; the param defaults mirror
    the LKD open-data conventions.
    """
    return K_Component.Nodes(id=NodeNew,
                             name=NodeNew,
                             source_id=['LKD_' + PipeID],
                             node_id=['N_' + NodeNew],
                             country_code=cc,
                             lat=lat,
                             long=long,
                             param={'comp_units': 0,
                                    'operator_name': None,
                                    'is_import': 0,
                                    'is_export': 0,
                                    'H_L_conver': 0,
                                    'operator_Z': None,
                                    'compressor': [],
                                    'entsog_key': None,
                                    'is_crossBorder': 0,
                                    'ugs': 0,
                                    'production': 0,
                                    'exact': 2,
                                    'license': 'open data'})


def changePipeSegments(Netz, RelDirName='LKD_NodeChanges.csv'):
    """Changes some pipe segments based on an input CSV file.

    Input:
        Netz:        network instance (modified in place)
        RelDirName:  CSV file listing node changes, one row per pipe:
                     PipeID;NodeCorrect;NodeWrong;NodeNew;lat;long;cc
                     [default: 'LKD_NodeChanges.csv']
    Return:
        Netz:        the same instance, with changed pipe segments; pipes
                     whose NodeNew is 'None' are removed.
    """
    if os.path.exists(RelDirName):
        with open(RelDirName, 'r', encoding="utf-8", errors="ignore") as fid:
            # Skip header line
            fid.readline()

            csv_reader = csv.reader(fid, delimiter=";")
            InPipeIds = Netz.get_Attrib(compName='PipeSegments', attribName='id')

            for row in csv_reader:
                # Getting pipe change instructions from CSV file
                PipeID = str(row[0])
                # NodeCorrect = row[1]  (unused)
                NodeWrong = row[2]
                NodeNew = row[3]
                lat = float(row[4])
                long = float(row[5])
                cc = row[6]

                # getting corresponding pipeSegment from LKD data set
                pos = M_FindPos.find_pos_StringInList(String=PipeID,
                                                      ListOfStrings=InPipeIds)

                if len(pos) == 1:
                    pipe = Netz.PipeSegments[pos[0]]

                    if NodeNew == 'None':
                        # Mark pipe for removal (filtered out below)
                        pipe.id = '-9999'

                    elif pipe.node_id[0] == NodeWrong:
                        # Re-route the pipe segment's from-node
                        pipe.node_id[0] = NodeNew
                        pipe.lat[0] = lat
                        pipe.long[0] = long
                        pipe.country_code[0] = cc
                        pipe.param['length'] = M_Projection.LatLong2DistanceValue(
                            lat, long, pipe.lat[-1], pipe.long[-1])
                        # Add the replacement node
                        Netz.Nodes.append(
                            _make_changed_node(NodeNew, PipeID, cc, lat, long))

                    elif pipe.node_id[1] == NodeWrong:
                        # Re-route the pipe segment's to-node
                        pipe.node_id[1] = NodeNew
                        pipe.lat[-1] = lat
                        pipe.long[-1] = long
                        pipe.country_code[-1] = cc
                        pipe.param['length'] = M_Projection.LatLong2DistanceValue(
                            pipe.lat[0], pipe.long[0], lat, long)
                        # Add the replacement node
                        Netz.Nodes.append(
                            _make_changed_node(NodeNew, PipeID, cc, lat, long))
                    else:
                        print('M_LKD.changePipeSegments: something wrong here too')
                else:
                    print('M_LKD.changePipeSegments: something wrong here')

        # Drop pipe segments that were marked for removal
        Netz.select_byAttrib(['PipeSegments'], 'id', '-9999', '!=')

    return Netz
Пример #19
0
def join_Component_Meta(Elemente, Meta_Elemente, Meta_Namen, Meta_Typen,
                        Method_Name):
    """ Function to join elements (**Elemente**) with meta data of elements **Meta_Elemente**.

    \n.. comments:
    Input:
        Elemente:            Gas Netzwerk elements (topological information)
        Meta_Elemente:       Information of Meta data for pipelines
        Meta_Namen:          Variable names of the Meta_Elemente
        Meta_Typen:          Variable type of the different Meta_Elemente (e.g. text, real)
        Method_Name:         per-attribute method labels; a non-empty entry means the
                             attribute is stored in the element's param dict
    Return:
        Elemente:            Gas Netzwerk elements, linked with meta data; elements
                             without a meta partner are dropped.
    """

    # comp_id of each meta record, used to link meta rows to elements
    Meta_comp_ids = M_Helfer.get_attribFromList(Meta_Elemente, 'comp_id')
    posKeep = []  # element positions that found a meta partner
    posMeta = []  # corresponding meta positions
    try:
        for countEle, ele in enumerate(Elemente):
            dieserWert = ele.id
            diserNodeID = ele.node_id

            pos = M_FindPos.find_pos_StringInList(dieserWert, Meta_comp_ids)

            if len(pos) > 0:
                posMeta.append(pos[0])
                posKeep.append(countEle)

                for idx, metName in enumerate(Meta_Namen):
                    if metName != 'comp_id' and metName != 'id':
                        metVal = getattr(Meta_Elemente[pos[0]], metName)
                        if len(Method_Name[idx]) > 0:
                            # attribute lives in the element's param dict
                            Elemente[countEle].param.update({metName: metVal})
                        else:
                            # plain attribute on the element itself
                            setattr(Elemente[countEle], metName, metVal)

                # Restore id/node_id in case the meta attributes clobbered them
                Elemente[countEle].node_id = diserNodeID
                Elemente[countEle].id = dieserWert

        # Keep only the elements for which meta data was found
        Temp = K_Netze.NetComp()
        Temp.Temp = Elemente
        Temp.select_byPos('Temp', posKeep)
        Elemente = Temp.Temp

    except Exception:
        # boundary handler: report where the failure happened, then re-raise
        print("ERROR: M_Verknuepfe.join_Component_Meta")
        raise

    return Elemente
Пример #20
0
def read_PipeLines(NumDataSets=1e+100, RelDirName='Eingabe/InternetDaten/'):
    """ Reading of pipeline information from CSV file. Number of pipelines to read given with
	**NumDataSets**, and location of relative path folder is **RelDirName**

    \n.. comments:
    Input:
        NumDataSets: 	Maximum number of elements to be read
						(default = 1e+100)
		RelDirName: 	String containing relative directory name
						(default = 'Eingabe/InternetDaten/')
    Return:
        PipeLines:       PipeLines component list (empty if the CSV file is missing)
    """

    # Per-row CSV columns ('ids' instead of 'id' to avoid shadowing builtin)
    ids = []
    name = []
    node_id = []
    meta_id = []
    source_id = []
    PipeLines = []

    dataFolder = Path.cwd()
    filename = dataFolder / RelDirName

    FileName = str(filename / 'Loc_PipePoints.csv')

    if os.path.exists(FileName):
        with open(FileName, 'r', encoding="utf-8") as fid:
            # Skip the three header lines
            for ii in range(3):
                fid.readline()

            # Read remaining rows with the CSV module
            csv_reader = csv.reader(fid, delimiter=";")
            for row in csv_reader:
                ids.append(row[0])
                source_id.append(''.join([ID_Add, str(row[0])]))
                name.append(row[1])
                node_id.append(row[2])
                meta_id.append(row[3])

        # A pipeline is stored as several consecutive rows sharing the same
        # name; group those rows into one PipeLines element per name.
        countLeitung = 0  # index of the pipeline currently being assembled
        countLine = 0     # index of the first CSV row of that pipeline
        MaxNum = len(name)

        while countLine < MaxNum:
            PipeLines.append(
                K_Component.PipeLines(id=None,
                                      name='',
                                      node_id=[],
                                      country_code=None,
                                      source_id=[],
                                      lat=None,
                                      long=None))
            dieserLeitungsName = name[countLine]
            dieserPunktName = node_id[countLine]
            dieserMet_id = meta_id[countLine]
            dieserid = ids[countLine]
            dieserSource_id = source_id[countLine]

            PipeLines[countLeitung].id = dieserid
            PipeLines[countLeitung].name = dieserLeitungsName
            PipeLines[countLeitung].node_id = [dieserPunktName]
            PipeLines[countLeitung].source_id = [dieserSource_id]
            PipeLines[countLeitung].param['meta_id'] = dieserMet_id

            # Find the remaining rows belonging to this pipeline name
            allLeitungsNames = name[countLine + 1:]
            pos = M_FindPos.find_pos_StringInList(dieserLeitungsName,
                                                  allLeitungsNames)
            if len(pos) == 1:
                # Exactly one further row: it holds the end node
                dieserPunktName = node_id[countLine + 1 + pos[0]]
                PipeLines[countLeitung].node_id.append(dieserPunktName)
            elif len(pos) > 1:
                # Several further rows: append intermediate nodes first,
                # then the end node
                dieserPunktName = node_id[countLine + 1 + pos[len(pos) - 1]]
                pos = pos[0:len(pos) - 1]
                for p in pos:
                    PipeLines[countLeitung].node_id.append(node_id[countLine +
                                                                   1 + p])
                PipeLines[countLeitung].node_id.append(dieserPunktName)

                # Restore len(pos) to the original match count so the
                # row-advance below skips all consumed rows
                pos.append(0)
            else:
                # Name appears only once: pipeline has no end node row
                print('Leitung defekt')

            countLeitung = countLeitung + 1
            # Advance past the start row plus all matched rows
            countLine = countLine + 1 + len(pos)

            # User-requested early stop based on NumDataSets
            if countLeitung > NumDataSets:
                return PipeLines

    return PipeLines
Пример #21
0
def find_MatchNetzPoint(Netz_1,
                        CompName_1,
                        Netz_2,
                        CompName_2,
                        multDist=1,
                        testRun=False,
                        powerVal=1):
    """ Finds a vector containing the positions of which EntsoG component should be 
    linked with which point from Netz class instance. The following attributes are 
    currently implemented: name, lat, and long.  Input to method are **EntsoGCompName**, 
    **Netz** instance, **NetzCompName**, **multDist**, **testRun**, **powerVal**.  
    Return are the position lists for the EntsoG instance, the Netz instance, 
    and the Goodness value (ranging 0..1).

    \n.. comments: 
    Input:
        Netz_1:          Netz Class instance
        CompName_1:      string, of Netz_1 component name, to be used.  
        Netz_2:          instance of class Netz
        CompName_2:      string, of Netz_2 component name, to be used.  
        multDist:        (Optional = 1)  
        testRun:         (Optional = False), for:  
                             True  = will NOT carry out the long while loop  
                             False = will carry out the long while loop  
                             []    = will carry out the long while loop  
    Return:  
        posEntsoG:       List of ints, of positions from EntsoG  
        posNetz:         List of ints, of positions from Netz  
        GoodnessVal:     list of floats, of goodness values  
    """
    # NOTE(review): despite "ranging 0..1" above, the goodness values written
    # below look like a 0..100 scale (e.g. `100 - InvDistReal[...]`) — confirm.

    # Selecting the dat based on Component from EntsoG
    Comp_1 = Netz_1.__dict__[CompName_1]

    # Selecting the dat based on Component from Netze
    Comp_2 = Netz_2.__dict__[CompName_2]

    # Initialization of variables
    pos_1 = []           # matched positions in Comp_1, in match order
    pos_2 = []           # matched positions in Comp_2, in match order
    GoodnessVal = []     # goodness value per matched pair
    posLeft_1 = [s for s in range(len(Comp_1))]   # positions still unmatched
    posLeft_2 = [s for s in range(len(Comp_2))]

    Run_1 = True
    Run_2 = True

    # So that Test Runs with shorter time can be executed
    if testRun:
        Run_1 = False

    #script_dir  = path.dirname(__file__)
    #logFileName = path.join(script_dir, '../Ausgabe/log_' + str(multDist) + '.csv')
    # log file path is relative to the current working directory's parent
    logFileName = '../Ausgabe/log_' + str(multDist) + '.csv'

    Name_Orig_1 = M_Helfer.get_NotPos(Comp_1, pos_1, 'name')
    Name_Orig_2 = M_Helfer.get_NotPos(Comp_2, pos_2, 'name')

    # Running through data set for first time, to catch all locations, where name is totally same
    [Name_1, lat_1, long_1] = M_Helfer.get_NotPos3(Comp_1, pos_1)
    [Name_2, lat_2, long_2] = M_Helfer.get_NotPos3(Comp_2, pos_2)

    # Getting matching location names
    [New_pos_1, New_pos_2] = M_Helfer.get_NameMatch(Name_1, Name_2)

    # Distance penalty matrix: pairwise distance raised to powerVal, scaled
    # by multDist
    Distances = M_Projection.LatLong2DistanceMatrix(lat_1, long_1, lat_2,
                                                    long_2)
    InvDistReal2 = M_MatLab.pow_Matrix(Distances, powerVal)
    InvDistReal = M_MatLab.multi_MatrixConst(InvDistReal2, multDist)

    # Schreiben von Ergebnissen in eine CSV Datei
    # (re-create the log file for this run)
    if os.path.isfile(logFileName):
        os.remove(logFileName)
    M_Helfer.txt2File(
        logFileName,
        'EntsoG_Name;Netz_Name;NameSim;Distance;Goodness;EntsoG_pos;Netz_pos')
    # Accept all exact-name matches first
    for ii in range(len(New_pos_1)):
        # adding new positoins to vec of positions found
        pos_1.append(New_pos_1[ii])
        pos_2.append(New_pos_2[ii])
        GoodnessVal.append(100 - InvDistReal[New_pos_1[ii]][New_pos_2[ii]])
        # removing positions that are found from vector of Pos to be found
        try:
            posLeft_1.remove(New_pos_1[ii])
        except:
            pass
        try:
            posLeft_2.remove(New_pos_2[ii])
        except:
            pass

        # writing to log file
        strstr = Name_Orig_1[New_pos_1[ii]] + ';' + \
            Name_Orig_2[New_pos_2[ii]] + ';' + \
            '100;' + \
            str(Distances[New_pos_1[ii]][New_pos_2[ii]]) + ';' + \
            str(100 - InvDistReal[New_pos_1[ii]][New_pos_2[ii]]) + ';' + \
            str(New_pos_1[ii]) + ';' + str(New_pos_2[ii])
        M_Helfer.txt2File(logFileName, strstr)

    # Generating un-shrunk data for later
    [Orig_Name_1, Orig_lat_1, Orig_long_1] = M_Helfer.get_NotPos3(Comp_1, [])
    [Orig_Name_2, Orig_lat_2, Orig_long_2] = M_Helfer.get_NotPos3(Comp_2, [])

    # Forming matrixes: fuzzy name similarity and scaled distance penalty
    Name_Matrix_Orig = M_Helfer.get_NameMatrix_Fuzzy(Orig_Name_1, Orig_Name_2)

    Dist_Matrix_Orig = M_Projection.LatLong2DistanceMatrix(
        Orig_lat_1, Orig_long_1, Orig_lat_2, Orig_long_2)
    Dist_Matrix_Orig2 = M_MatLab.pow_Matrix(Dist_Matrix_Orig, powerVal)
    Dist_Matrix_Orig3 = M_MatLab.multi_MatrixConst(Dist_Matrix_Orig2, multDist)

    # Combining matrixes: goodness = name similarity minus distance penalty
    GoodnessMatrix_Orig = M_MatLab.sub_2Matrix(Name_Matrix_Orig,
                                               Dist_Matrix_Orig3)

    # Now going through the rest of the data set: greedily take the best
    # remaining pair until one side is exhausted (or testRun skipped it)
    while Run_2 and Run_1:

        # Shrink matrices to the still-unmatched rows/columns
        GoodnessMatrix_Shrunk = M_MatLab.shrink_Matrix(GoodnessMatrix_Orig,
                                                       posLeft_1, posLeft_2)
        Name_Matrix_Shrunk = M_MatLab.shrink_Matrix(Name_Matrix_Orig,
                                                    posLeft_1, posLeft_2)
        Dist_Matrix_Shrunk = M_MatLab.shrink_Matrix(Dist_Matrix_Orig,
                                                    posLeft_1, posLeft_2)

        # determin popsitions in shrunk data sets
        [pos_Shrunk_1, pos_Shrunk_2
         ] = M_FindPos.find_pos_ConditionInMatrix(GoodnessMatrix_Shrunk, 'max')

        nam = Name_Matrix_Shrunk[pos_Shrunk_1][pos_Shrunk_2]
        dis = Dist_Matrix_Shrunk[pos_Shrunk_1][pos_Shrunk_2]

        GoodnessVal.append(GoodnessMatrix_Shrunk[pos_Shrunk_1][pos_Shrunk_2])
        # dtermin position in original data sets
        pos_Orig_1 = posLeft_1[pos_Shrunk_1]
        pos_Orig_2 = posLeft_2[pos_Shrunk_2]

        pos_1.append(pos_Orig_1)
        pos_2.append(pos_Orig_2)

        posLeft_1.remove(pos_Orig_1)
        posLeft_2.remove(pos_Orig_2)

        # For Log file
        strstr = Name_Orig_1[pos_Orig_1] + ';' + Name_Orig_2[pos_Orig_2] + \
                      ';' + str(nam) + ';' + str(dis) + ';' + \
                      str(GoodnessMatrix_Shrunk[pos_Shrunk_1][pos_Shrunk_2]) + ';' + \
                      str(pos_Orig_1) + ';' + str(pos_Orig_2)
        M_Helfer.txt2File(logFileName, strstr)

        # Check if need to stop: stop once either side is fully matched
        if len(pos_1) == len(Comp_1):
            Run_1 = False
        if len(pos_2) == len(Comp_2):
            Run_2 = False

    return pos_1, pos_2, GoodnessVal
Пример #22
0
def match(Netz_0, Netz_1, compName, threshold, multiSelect, funcs, numFuncs=2):
    """
    Main function that matches positions of component from different sources/.. 
    to each other, by using different functions that the user specifies.
    
    \n.. comments: 
    Input:
        Netz_0          Instance of first network netz 
        Netz_1          Instance of second network netz 
        compName        string containing compnent lable
        threshold       overall threshold for selection between points, value 
                        between 0 and 100
        multiSelect     if True, one Netz_0 element may be matched to several
                        Netz_1 elements
        funcs           functions, that the user wants to use to find the match
        numFuncs        number of functions supplied (1 = single callable,
                        otherwise an iterable of callables) [default: 2]
    Return:
        pos_match_Netz_0    ordered list of positions, in respect of Netz_0, that have been linked with positions from Netz_1
        pos_add_Netz_0      list of positions of Netz_0, for which a corresponding element was not found in Netz_1
        pos_match_Netz_1    ordered list of positions, in respect of Netz_1, that have been linked with positions from Netz_0
        pos_add_Netz_1      list of positions of Netz_1, that need to be added to Netz_0
        (returned in the order [pos_match_Netz_0, pos_add_Netz_0,
         pos_match_Netz_1, pos_add_Netz_1])
    Example:
        [pos_match_Netz_0, pos_add_Netz_0, pos_match_Netz_1, pos_add_Netz_1] = M_Matching.match(
            Netz_0, Netz_1, compName = 'LNGs', threshold = 80,
            funcs = (lambda comp_0, comp_1: M_Matching.getMatch_Names(comp_0, comp_1), 
                lambda comp_0, comp_1: M_Matching.getMatch_LatLong(comp_0, comp_1, 50000)
                ))
    """
    # Initialization
    pos_match_Netz_0 = []
    pos_match_Netz_1 = []
    pos_add_Netz_1 = []
    pos_add_Netz_0 = []
    num_comp_Netz_0 = len(Netz_0.__dict__[compName])
    num_comp_Netz_1 = len(Netz_1.__dict__[compName])

    # creating of a dummy matrix of size of the number of elements from both data set.
    goodness_Matrix = M_Helfer.get_NameMatrix_Fuzzy(
        [str(x) for x in range(len(Netz_0.__dict__[compName]))],
        [str(y) for y in range(len(Netz_1.__dict__[compName]))])
    if numFuncs == 1:
        goodness_Matrix_2 = copy.deepcopy(goodness_Matrix)
    else:
        # one per-function goodness matrix (code below assumes two funcs)
        goodness_Matrix_2 = [
            copy.deepcopy(goodness_Matrix),
            copy.deepcopy(goodness_Matrix)
        ]

    # loops that goes through each combination of component pairs from both data
    # sets and calculating a goodness value, with values between 0 and 100,
    # based on the sum of the functions supplied by the user

    if numFuncs == 1:
        for ii in range(len(Netz_0.__dict__[compName])):
            # going through each element of Netz_0
            comp_0 = Netz_0.__dict__[compName][ii]

            # going through each element of Netz_1
            for jj in range(len(Netz_1.__dict__[compName])):
                comp_1 = Netz_1.__dict__[compName][jj]
                # getting goodness over all functions
                goodness = funcs(comp_0, comp_1)
                goodness_Matrix_2[ii][jj] = goodness
                # NOTE(review): goodness_Matrix_2[ii][jj] was set to
                # `goodness` on the previous line, so this stores 2*goodness
                # — confirm the doubling is intended, since it scales the
                # values compared against `threshold` below.
                goodness_Matrix[ii][jj] = goodness + goodness_Matrix_2[ii][jj]

    else:
        for ii in range(len(Netz_0.__dict__[compName])):
            # going through each element of Netz_0
            comp_0 = Netz_0.__dict__[compName][ii]

            # going through each element of Netz_1
            for jj in range(len(Netz_1.__dict__[compName])):
                comp_1 = Netz_1.__dict__[compName][jj]
                # getting goodness over all functions (average of all funcs)
                goodness = 0
                ff = 0
                for func in funcs:
                    goodness_Matrix_2[ff][ii][jj] = func(comp_0, comp_1)
                    goodness = goodness + goodness_Matrix_2[ff][ii][jj]
                    ff = ff + 1
                goodness = goodness / len(funcs)
                #                goodness   = sum(func(comp_0, comp_1) for func in funcs) / len(funcs)`

                # writing goodness value to matrix.
                goodness_Matrix[ii][jj] = goodness

    #goodness_Matrix_Const   = copy.deepcopy(goodness_Matrix)

    # now finding the best matching pairs, so looking at the goodness_Matrix and
    # starting off by selecting the one with the largest goodness, removing this pair
    # from the options and then looking for the next highest goodness value, and
    # removing this pair, and then repeating this till the goodness value dropps
    # below the threshold supplied by user.
    while True:
        # finding largest goodness vlue
        [pos_0,
         pos_1] = M_FindPos.find_pos_ConditionInMatrix(goodness_Matrix, 'max')

        # check if gooddness value still above threshold value
        if goodness_Matrix[pos_0][pos_1] > threshold:
            # Keeping the positions thare are best
            pos_match_Netz_0.append(pos_0)
            pos_match_Netz_1.append(pos_1)

            # removing pairs from options, by setting values to -inf
            for xx in range(num_comp_Netz_0):
                goodness_Matrix[xx][pos_1] = -np.inf

            # when multiSelect, the Netz_0 row stays available so one
            # Netz_0 element can match several Netz_1 elements
            if multiSelect != True:
                for yy in range(num_comp_Netz_1):
                    goodness_Matrix[pos_0][yy] = -np.inf

        # if next goodnes value is smaller than threshold or none left, then
        # creating of further return parameters and then leaving function
        elif (goodness_Matrix[pos_0][pos_1]
              == -np.inf) or (goodness_Matrix[pos_0][pos_1] <= threshold):
            # everything not matched on either side is reported as "to add"
            temp_1 = [
                item for item in range(num_comp_Netz_1)
                if item not in pos_match_Netz_1
            ]
            temp_0 = [
                item for item in range(num_comp_Netz_0)
                if item not in pos_match_Netz_0
            ]

            for wert in temp_1:
                pos_add_Netz_1.append(wert)
            for wert in temp_0:
                pos_add_Netz_0.append(wert)

            break

    return [pos_match_Netz_0, pos_add_Netz_0, pos_match_Netz_1, pos_add_Netz_1]
Пример #23
0
def schrott():
    """Scratch/debug routine for interactively matching two gas networks
    ("Schrott" is German for "scrap").

    NOTE(review): this function is dead scratch code and is NOT callable
    as written.  Every working variable below is initialised to an empty
    list, so the very first indexed access
    (``nodeID_Set_1_Friends[minimum_Diff_Index[0]]``) raises IndexError.
    The empty assignments are presumably stand-ins for values that used to
    be function arguments or module globals — verify against version
    history before reuse.

    Intended behaviour (as far as the visible code shows): iteratively
    remove the worst-matching edges from graph ``G_Set_1``, re-compute
    the path comparison against ``G_Set_2`` via ``get_PathInfo`` /
    ``makeRelDistMatrix``, plot each intermediate state, and finally
    compose both graphs into a single network.

    Returns:
        G_Netz:     result of ``M_Graph.Graph2Netz`` (project type)
        G_Set_Sum:  networkx graph, composition of G_Set_1 and G_Set_2

    Relies on external names not defined in this block: ``np`` (numpy),
    ``NX`` (networkx), ``plt`` (matplotlib.pyplot), ``M_FindPos``,
    ``M_Graph``, ``makeRelDistMatrix``, ``get_PathInfo``.
    """

    # Placeholder initialisations — all empty, see docstring NOTE(review).
    G_Set_1 = []
    G_Set_2 = []
    length_Set_1 = []
    length_Set_2 = []
    minimum_Diff_Val = []
    edge_id_set_1 = []
    PercDiff = []
    minimum_Diff_Index = []
    nodes_id_Set_1 = []
    nodes_id_Set_2 = []
    nodeID_Set_1_Friends = []
    nodeID_Set_2_Friends = []
    cutoff = []
    dist_matrix_Diff = []

    # Debug dump of the currently best-matching node pair from both sets.
    # NOTE(review): raises IndexError here because the lists above are empty.
    print('set 1 ', nodeID_Set_1_Friends[minimum_Diff_Index[0]])
    print('set 1 ', nodeID_Set_1_Friends[minimum_Diff_Index[1]])
    print(' ')
    print('set 2 ', nodeID_Set_2_Friends[minimum_Diff_Index[0]])
    print('set 2 ', nodeID_Set_2_Friends[minimum_Diff_Index[1]])
    print(' ')

    # Locate the (row, col) position of the smallest entry in the
    # node-distance difference matrix and print the corresponding pair.
    minimum_Diff_Index = np.unravel_index(
        np.argmin(dist_matrix_Diff, axis=None), dist_matrix_Diff.shape)
    print('set 1 ', nodeID_Set_1_Friends[minimum_Diff_Index[0]])
    print('set 1 ', nodeID_Set_1_Friends[minimum_Diff_Index[1]])
    print(' ')
    print('set 2 ', nodeID_Set_2_Friends[minimum_Diff_Index[0]])
    print('set 2 ', nodeID_Set_2_Friends[minimum_Diff_Index[1]])

    # Relative difference of path lengths between the two sets; the minimum
    # entry identifies the best edge/path match candidate.
    edge_distMatrix = makeRelDistMatrix(length_Set_1, length_Set_2)
    minimum_Diff_Index = np.unravel_index(
        np.argmin(edge_distMatrix, axis=None), edge_distMatrix.shape)
    minimum_Diff_Val = edge_distMatrix.min()

    # Plot the initial state of network 1.
    print(' ')
    figNum = 1
    fig = plt.figure(figNum)
    NX.draw(G_Set_1, NX.get_node_attributes(G_Set_1, 'pos'), node_size=7)
    fig.show()
    figNum = figNum + 1

    # jumping into while loop, as long as relative distance value is smaller than threshold
    # NOTE(review): PercDiff is an empty list here; `minimum_Diff_Val < PercDiff`
    # would raise TypeError in Python 3 — presumably PercDiff was a float threshold.
    while len(length_Set_1) > 0 and len(
            length_Set_2) > 0 and minimum_Diff_Val < PercDiff:
        ####[]
        print('While Loop')
        print(edge_id_set_1)
        # 1) removing edges from  network 1
        # Iterate over a snapshot (list(...)) because edges are removed
        # from G_Set_1 inside the loop.
        for u, v, key, data in list(G_Set_1.edges(None, data=True, keys=True)):
            if data['id'] in edge_id_set_1[minimum_Diff_Index[0]]:
                print('removing edge: ', data['id'])
                # finding nodes set
                pos = M_FindPos.find_pos_StringInTouple(
                    data['id'][0], edge_id_set_1[minimum_Diff_Index[0]])
                nodes = nodes_id_Set_1[minimum_Diff_Index[0]][pos[0]]

                # Dissecting the "node1,node2" string at the first comma
                pos = M_FindPos.find_pos_CharInStr(',', nodes)
                node1 = nodes[:pos[0]]
                node2 = nodes[pos[0] + 1:]

                # Removing Edge
                G_Set_1.remove_edge(node1, node2, key=key)

                # Now moving node position if same node in both data sets:
                # snap the set-1 node onto its set-2 counterpart's (long, lat).
                # NOTE(review): `G.node[...]` is the pre-networkx-2.4 API
                # (removed in favour of `G.nodes[...]`) — confirm the pinned
                # networkx version.
                if node1 in nodeID_Set_1_Friends:
                    pos = M_FindPos.find_pos_StringInList(
                        node1, nodeID_Set_1_Friends)
                    lat = G_Set_2.node[nodeID_Set_2_Friends[pos[0]]]['pos'][1]
                    long = G_Set_2.node[nodeID_Set_2_Friends[pos[0]]]['pos'][0]
                    G_Set_1.node[node1]['pos'] = (long, lat)

                if node2 in nodeID_Set_1_Friends:
                    pos = M_FindPos.find_pos_StringInList(
                        node2, nodeID_Set_1_Friends)
                    lat = G_Set_2.node[nodeID_Set_2_Friends[pos[0]]]['pos'][1]
                    long = G_Set_2.node[nodeID_Set_2_Friends[pos[0]]]['pos'][0]
                    G_Set_1.node[node2]['pos'] = (long, lat)

        # Plotting resulting network
        fig = plt.figure(figNum)
        NX.draw(G_Set_1, NX.get_node_attributes(G_Set_1, 'pos'), node_size=7)
        fig.show()
        figNum = figNum + 1

        # 2) rerunning get_matchingPath
        print('execution of get_PathInfo in while loop')
        lat_Set_1, long_Set_1, length_Set_1, edge_id_set_1, nodes_id_Set_1 = get_PathInfo(
            G_Set_1,
            nodeID_Set_1_Friends[minimum_Diff_Index[0]],
            nodeID_Set_1_Friends[minimum_Diff_Index[1]],
            cutoff=cutoff)
        lat_Set_2, long_Set_2, length_Set_2, edge_id_set_2, nodes_id_Set_2 = get_PathInfo(
            G_Set_2,
            nodeID_Set_2_Friends[minimum_Diff_Index[0]],
            nodeID_Set_2_Friends[minimum_Diff_Index[1]],
            cutoff=cutoff)

        # Re-evaluate the best match; if set 2 yielded no paths, force the
        # loop condition false by setting the value above the threshold.
        if len(lat_Set_2) > 0:
            edge_distMatrix = makeRelDistMatrix(length_Set_1, length_Set_2)
            minimum_Diff_Index = np.unravel_index(
                np.argmin(edge_distMatrix, axis=None), edge_distMatrix.shape)
            minimum_Diff_Val = edge_distMatrix.min()
        else:
            minimum_Diff_Val = PercDiff * 2

    ###########################################################################
    # Joining the two networks
    ###########################################################################
    G_Set_Sum = NX.compose(G_Set_1, G_Set_2)

    # removing nodes of degree zero (isolated after the edge removals above)
    deg = G_Set_Sum.degree(G_Set_Sum)
    for n in list(G_Set_Sum.nodes()):
        if deg[n] == 0:
            G_Set_Sum.remove_node(n)

    # converting graph into network (project-specific Netz class)
    G_Netz = M_Graph.Graph2Netz(G_Set_Sum)
    print('leaving function')
    return G_Netz, G_Set_Sum