def find_missing_points(Punkte1, FeldName, Punkte2):
    """ Function returning points that are in data set **Punkte1** but not in **Punkte2**.
    The attribute label used to compare the two data sets is given through **FeldName**.

    \n.. comments:
    Input:
        Punkte1:         List of points (first data set)
        FeldName:        String naming the attribute that links the two data sets.
                         Currently "name" and "node_id" are implemented.
        Punkte2:         List of strings (second data set)
    Return:
        FehlendePunkte:  List of strings (Punkte.name or Punkte.node_id).
    """
    # Initialization of variables
    FehlendePunkte = []

    if FeldName.lower() == "name":
        for punkt in Punkte1:
            pos = M_FindPos.find_pos_StringInList(punkt.name, Punkte2)
            if len(pos) == 0:
                FehlendePunkte.append(punkt.name)

    elif FeldName.lower() == "node_id":
        for punkt in Punkte1:
            pos = M_FindPos.find_pos_StringInList(punkt.node_id, Punkte2)
            if len(pos) == 0:
                FehlendePunkte.append(punkt.node_id)

    else:
        # A bare "raise" outside an except block is invalid; raise an explicit error instead.
        raise NotImplementedError(sys.argv[0] + '.find_missing_points: code not written yet for FeldName = ' + FeldName)

    return FehlendePunkte
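def _example_find_missing_points():
    """Illustrative usage sketch, not part of the original module: any objects
    carrying the attribute named by FeldName will do; the second argument is the
    plain list of strings searched by M_FindPos.find_pos_StringInList. The node
    names used here are placeholders."""
    class _Punkt:
        def __init__(self, name):
            self.name = name

    nodesA = [_Punkt('Berlin'), _Punkt('Hamburg')]   # hypothetical first data set
    namesB = ['Berlin']                              # hypothetical second data set (plain strings)
    missing = find_missing_points(nodesA, 'name', namesB)
    print(missing)   # expected: ['Hamburg']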
def read_CSV(CSV_Path):
    """Description:
    ------------
        Reads data from folder CSV_Path into Grid
        Grid = instance of Netz class

    Input Parameter:
    ----------------
        CSV_Path        string containing path name of data location

    Return Parameters:
    ------------------
        Grid            instance of class K_Netze.Netz, populated with data from CSV files
    """
    FileList = ['BorderPoints', 'PipePoints', 'Compressors', 'Nodes', 'EntryPoints',
                'InterConnectionPoints', 'LNGs', 'Meta_BorderPoints', 'Meta_Compressors',
                'Meta_EntryPoints', 'Meta_InterConnectionPoints', 'Meta_LNGs',
                'Meta_PipePoints', 'Meta_Storages', 'Storages']

    print('')
    print(CC.Caption + 'Load CSV-Data into Grid' + CC.End)
    print('--------------------------------------')

    Grid = K_Netze.NetComp()
    Grid.Processes.append(K_Netze.Processes('M_CSV.read_CSV'))

    # Loop over the component file names used for import
    for key in FileList:
        # Meta data files are not prefixed with 'Gas_'
        if 'Meta_' in key:
            filename = key + '.csv'
        else:
            filename = 'Gas_' + key + '.csv'
        CSV_File = os.path.join(CSV_Path, filename)

        Z = CSV_2_list(CSV_File)
        if len(Z) > 0:
            for entry in Z:
                Keys = list(entry.keys())
                Vals = list(entry.values())
                posId = M_FindPos.find_pos_StringInList('id', Keys)
                posName = M_FindPos.find_pos_StringInList('name', Keys)
                del entry['id']
                del entry['name']
                Grid.__dict__[key].append(K_Netze.__dict__[key](
                    id=Vals[posId[0]],
                    name=Vals[posName[0]],
                    param=entry))
        else:
            Grid.__dict__[key] = []

    return Grid
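def _example_read_CSV():
    """Illustrative usage sketch, not part of the original module: read_CSV expects
    the folder to contain one file per component, named 'Gas_<Component>.csv' or
    '<Meta_Component>.csv' as listed in FileList above. The path used here is a
    placeholder, and 'Nodes' is assumed to be a populated component."""
    Grid = read_CSV('Eingabe/CSV/')
    print(len(Grid.Nodes))   # number of node records found in Gas_Nodes.csv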
def changeCountryCode(Netz, RelDirName='LKD_CountryCodeChanges.csv'):
    """Changes the country code of network elements based on an input CSV file.
    """
    if os.path.exists(RelDirName):
        # Getting all node ids where the country code shall be changed
        fid = open(RelDirName, 'r', encoding="utf-8", errors="ignore")
        # Read header line
        fid.readline()
        csv_reader = csv.reader(fid, delimiter=";")
        allPipeID = []
        allCC = []
        for row in csv_reader:
            allPipeID.append(str(row[0]))
            allCC.append(row[1])
        fid.close()

        # Going through each element in Netz and changing the country code
        for comp in Netz.CompLabels():
            for elem in Netz.__dict__[comp]:
                if isinstance(elem.node_id, list):
                    for jj, elemId in enumerate(elem.node_id):
                        pos = M_FindPos.find_pos_StringInList(str(elemId), allPipeID)
                        if len(pos) == 1:
                            if isinstance(elem.country_code, list):
                                elem.country_code[jj] = allCC[pos[0]]
                            else:
                                elem.country_code = allCC[pos[0]]
                else:
                    pos = M_FindPos.find_pos_StringInList(str(elem.node_id), allPipeID)
                    if len(pos) == 1:
                        elem.country_code = allCC[pos[0]]

    return Netz
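def _example_countryCodeChangeFile(FileName='example_CountryCodeChanges.csv'):
    """Illustrative sketch, not part of the original module: writes a minimal CSV
    file in the layout changeCountryCode expects -- one header line, ';'-separated,
    column 0 is matched (as string) against each element's node_id, column 1 is the
    new country code. File name, column captions and values are placeholders."""
    with open(FileName, 'w', encoding='utf-8') as fid:
        fid.write('node_id;country_code\n')
        fid.write('12345;DE\n')
        fid.write('67890;NL\n')
    return FileName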
def join_PipeLine_Meta(Elemente, Meta_Elemente, Meta_Namen, Meta_Typen, Method_Name):
    """ Function to join elements (**Elemente**) with meta data of elements **Meta_Elemente**.

    \n.. comments:
    Input:
        Elemente:       Gas network elements (topological information)
        Meta_Elemente:  Information from meta data for pipelines
        Meta_Namen:     Variable names of Meta_Elemente
        Meta_Typen:     List of strings indicating the type of data
        Method_Name:    List of strings, containing an indicator if a column is to be stored in the param dict
    Return:
        Elemente:       Gas network elements linked to the meta data.
    """
    # Initialization of variables
    Meta_comp_ids = M_Helfer.get_attribFromList(Meta_Elemente, 'comp_id')
    countEle = 0
    posKeep = []

    try:
        for ele in Elemente:
            dieserWert = ele.param['meta_id']
            pos = M_FindPos.find_pos_StringInList(dieserWert, Meta_comp_ids)
            if len(pos) > 0:
                posKeep.append(countEle)
                for idx, metName in enumerate(Meta_Namen):
                    if metName != 'comp_id' and metName != 'id':
                        if len(Method_Name[idx]) > 0:
                            # Column goes into the param dict
                            Elemente[countEle].param.update({metName: getattr(Meta_Elemente[pos[0]], metName)})
                        else:
                            # Column goes directly into the element attribute
                            setattr(Elemente[countEle], metName, getattr(Meta_Elemente[pos[0]], metName))
            countEle = countEle + 1

        # Keep only the elements for which meta data was found
        Temp = K_Netze.NetComp()
        Temp.Temp = Elemente
        Temp.select_byPos('Temp', posKeep)
        Elemente = Temp.Temp

    except:
        print("ERROR: M_Verknuepfe.join_PipeLine_Meta")
        raise

    return Elemente
def unique_String(Punkte):
    """ Function returning a list of unique strings from input **Punkte**, using the
    function M_FindPos.find_pos_StringInList.

    \n.. comments:
    Input:
        Punkte          List of strings
    Output:
        Punkte_Return   List of strings, containing every input string once, in order of first occurrence.
    """
    Punkte_Return = []
    for DieserPunkt in Punkte:
        pos = M_FindPos.find_pos_StringInList(DieserPunkt, Punkte_Return)
        if len(pos) == 0:
            Punkte_Return.append(DieserPunkt)

    return Punkte_Return
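def _example_unique_String():
    """Illustrative usage sketch, not part of the original module: unique_String
    keeps the first occurrence of each string and preserves order."""
    print(unique_String(['A', 'B', 'A', 'C', 'B']))   # expected: ['A', 'B', 'C']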
def leseSQL_Punkte(InfoSQL, TabellenName):
    """ Reads a node table from the SQL data base.

    Input:
        InfoSQL         Structure with SQL data base access data
        TabellenName    String with the table name key, resolved via InfoSQL['IO'][TabellenName]
    Output:
        Punkte          List of nodes of class K_Netze.Nodes()
    """
    Punkte = []
    # Check that the table exists in the data base
    AlleTabellenNamen = getAllTableNames(InfoSQL)
    Name = InfoSQL['IO'][TabellenName]

    if len(M_FindPos.find_pos_StringInList(Name, AlleTabellenNamen)):
        con = connect(dbname=InfoSQL['IO']["DataBaseName"],
                      user=InfoSQL['IO']["User"],
                      host=InfoSQL['IO']["Host"],
                      port=int(InfoSQL['IO']["Port"]),
                      password=InfoSQL['IO']["PassWord"])
        cur = con.cursor()
        CurString = "SELECT * FROM " + InfoSQL['IO'][TabellenName]
        cur.execute(CurString)
        TabPunkte = cur.fetchall()
        cur.close()
        con.close()

        for tab in TabPunkte:
            id = tab[0]
            name = tab[1]
            lat = tab[5]
            long = tab[6]
            land = tab[4]
            Punkte.append(K_Netze.Nodes(id=id, name=name, lat=lat, long=long,
                                        country_code=land, comment=None, param={}))

    return Punkte
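def _example_InfoSQL():
    """Illustrative sketch, not part of the original module: minimal shape of the
    InfoSQL structure that the leseSQL_* functions assume. The key names under 'IO'
    ('DataBaseName', 'User', 'Host', 'Port', 'PassWord', 'TabName_Leitungen') are
    taken from the code; all values and the 'TabName_Punkte' key are placeholders
    for demonstration only."""
    return {'IO': {'DataBaseName': 'gas_grid',              # placeholder data base name
                   'User': 'postgres',                      # placeholder user
                   'Host': 'localhost',                     # placeholder host
                   'Port': '5432',                          # placeholder port (cast to int by the readers)
                   'PassWord': 'secret',                    # placeholder password
                   'TabName_Punkte': 'nodes_table',         # hypothetical node table key and name
                   'TabName_Leitungen': 'pipelines_table'}} # placeholder pipeline table name
    # e.g. Punkte = leseSQL_Punkte(_example_InfoSQL(), 'TabName_Punkte')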
def leseSQL_Meta(InfoSQL, TabellenName):
    """ Reads meta data tables from the SQL data base.

    Input:
        InfoSQL         Structure with SQL data base access data
        TabellenName    String with the table name key, resolved via InfoSQL['IO'][TabellenName]
    Output:
        MetaData        Meta data from the table, as a dict of column name -> list of values
        MetaType        Data type for each column of the meta data
        ColumnNames     Names of the meta data columns
    """
    MetaData = []
    MetaType = []
    # Check that the table exists in the data base
    AlleTabellenNamen = getAllTableNames(InfoSQL)
    Name = InfoSQL['IO'][TabellenName]

    if len(M_FindPos.find_pos_StringInList(Name, AlleTabellenNamen)):
        con = connect(dbname=InfoSQL['IO']["DataBaseName"],
                      user=InfoSQL['IO']["User"],
                      host=InfoSQL['IO']["Host"],
                      port=int(InfoSQL['IO']["Port"]),
                      password=InfoSQL['IO']["PassWord"])
        cur = con.cursor()
        CurString = "SELECT * FROM " + InfoSQL['IO'][TabellenName]
        cur.execute(CurString)
        MetaPunkte = cur.fetchall()
        cur.close()
        con.close()
    else:
        return MetaData

    # Read column names and data types
    con = connect(dbname=InfoSQL['IO']["DataBaseName"],
                  user=InfoSQL['IO']["User"],
                  host=InfoSQL['IO']["Host"],
                  port=int(InfoSQL['IO']["Port"]),
                  password=InfoSQL['IO']["PassWord"])
    cur = con.cursor()
    CurString = ("select column_name, data_type from information_schema.columns "
                 "where table_name = '" + InfoSQL['IO'][TabellenName] + "'")
    cur.execute(CurString)
    ColumnNames = []
    MetaTypeRaw = []
    ColumnNamesTuple = cur.fetchall()
    for name in ColumnNamesTuple:
        ColumnNames.append(name[0])
        MetaTypeRaw.append(name[1])
    cur.close()
    con.close()

    # Build the dict of meta data, one list of values per column
    count = 0
    for dicName in ColumnNames:
        PassVall = []
        for ii in list(range(len(MetaPunkte))):
            PassVall.append(MetaPunkte[ii][count])
        if count == 0:
            MetaData = {dicName: PassVall}
        else:
            MetaData[dicName] = PassVall
        count = count + 1

    MetaType = [typemap[typename] for typename in MetaTypeRaw]

    return [MetaData, MetaType, ColumnNames]
def leseSQL_Leitungen(InfoSQL):
    """ Reads the pipeline table from the SQL data base.

    Input:
        InfoSQL     Structure with SQL data base access data
    Output:
        Leitung     List of pipelines of class K_Netze.Leitung()
    """
    con = connect(dbname=InfoSQL['IO']["DataBaseName"],
                  user=InfoSQL['IO']["User"],
                  host=InfoSQL['IO']["Host"],
                  port=int(InfoSQL['IO']["Port"]),
                  password=InfoSQL['IO']["PassWord"])
    cur = con.cursor()
    CurString = "SELECT * FROM " + InfoSQL['IO']["TabName_Leitungen"]
    cur.execute(CurString)
    Leitungen = cur.fetchall()
    cur.close()
    con.close()

    # Initialization of variables
    countLeitung = 0
    countLine = 0
    MaxNum = len(Leitungen)
    Leitung = []
    AlleAlleName = []
    for ii in range(MaxNum):
        AlleAlleName.append(Leitungen[ii][2])

    while countLine < MaxNum:
        Leitung.append(K_Netze.Leitung())
        dieserLeitungsName = Leitungen[countLine][2]   # pipeline name
        dieserPunktName = Leitungen[countLine][3]      # node name
        Leitung[countLeitung].name = dieserLeitungsName
        Leitung[countLeitung].node_id = [dieserPunktName]
        Leitung[countLeitung].param['description'] = Leitungen[countLine][6]

        # Build the remaining list of pipeline names and find all further rows of this pipeline
        allLeitungsNames = AlleAlleName[countLine + 1:]
        pos = M_FindPos.find_pos_StringInList(dieserLeitungsName, allLeitungsNames)

        if len(pos) == 1:
            dieserPunktName = Leitungen[countLine + 1 + pos[0]][3]
            Leitung[countLeitung].node_id.append(dieserPunktName)
        elif len(pos) > 1:
            dieserPunktName = Leitungen[countLine + 1 + pos[len(pos) - 1]][3]
            pos = pos[0:len(pos) - 1]
            for p in pos:
                Leitung[countLeitung].node_id.append(Leitungen[countLine + 1 + p][3])
            Leitung[countLeitung].node_id.append(dieserPunktName)
            pos.append(0)
        else:
            print('Pipeline defective')

        countLeitung = countLeitung + 1
        countLine = countLine + 1 + len(pos)

    return Leitung
def changePipeSegments(Netz, RelDirName='LKD_NodeChanges.csv'):
    """Changes some pipe segments based on an input CSV file.
    """
    if os.path.exists(RelDirName):
        fid = open(RelDirName, 'r', encoding="utf-8", errors="ignore")
        # Read header line
        fid.readline()
        csv_reader = csv.reader(fid, delimiter=";")
        InPipeIds = Netz.get_Attrib(compName='PipeSegments', attribName='id')

        for row in csv_reader:
            # Getting pipe data from the CSV file
            PipeID = str(row[0])
            #NodeCorrect = row[1]
            NodeWrong = row[2]
            NodeNew = row[3]
            lat = float(row[4])
            long = float(row[5])
            cc = row[6]

            # Getting the corresponding pipe segment from the LKD data set
            pos = M_FindPos.find_pos_StringInList(String=PipeID, ListOfStrings=InPipeIds)
            if len(pos) == 1:
                if NodeNew == 'None':
                    # Removing the pipe
                    Netz.PipeSegments[pos[0]].id = '-9999'

                elif Netz.PipeSegments[pos[0]].node_id[0] == NodeWrong:
                    # Pipe segment "from" node
                    Netz.PipeSegments[pos[0]].node_id[0] = NodeNew
                    Netz.PipeSegments[pos[0]].lat[0] = lat
                    Netz.PipeSegments[pos[0]].long[0] = long
                    Netz.PipeSegments[pos[0]].country_code[0] = cc
                    Netz.PipeSegments[pos[0]].param['length'] = M_Projection.LatLong2DistanceValue(
                        lat, long,
                        Netz.PipeSegments[pos[0]].lat[-1],
                        Netz.PipeSegments[pos[0]].long[-1])
                    # Node
                    Netz.Nodes.append(K_Component.Nodes(
                        id=NodeNew, name=NodeNew, source_id=['LKD_' + PipeID],
                        node_id=['N_' + NodeNew], country_code=cc, lat=lat, long=long,
                        param={'comp_units': 0, 'operator_name': None, 'is_import': 0,
                               'is_export': 0, 'H_L_conver': 0, 'operator_Z': None,
                               'compressor': [], 'entsog_key': None, 'is_crossBorder': 0,
                               'ugs': 0, 'production': 0, 'exact': 2, 'license': 'open data'}))

                elif Netz.PipeSegments[pos[0]].node_id[1] == NodeWrong:
                    # Pipe segment "to" node
                    Netz.PipeSegments[pos[0]].node_id[1] = NodeNew
                    Netz.PipeSegments[pos[0]].lat[-1] = lat
                    Netz.PipeSegments[pos[0]].long[-1] = long
                    Netz.PipeSegments[pos[0]].country_code[-1] = cc
                    Netz.PipeSegments[pos[0]].param['length'] = M_Projection.LatLong2DistanceValue(
                        Netz.PipeSegments[pos[0]].lat[0],
                        Netz.PipeSegments[pos[0]].long[0],
                        lat, long)
                    # Node
                    Netz.Nodes.append(K_Component.Nodes(
                        id=NodeNew, name=NodeNew, source_id=['LKD_' + PipeID],
                        node_id=['N_' + NodeNew], country_code=cc, lat=lat, long=long,
                        param={'comp_units': 0, 'operator_name': None, 'is_import': 0,
                               'is_export': 0, 'H_L_conver': 0, 'operator_Z': None,
                               'compressor': [], 'entsog_key': None, 'is_crossBorder': 0,
                               'ugs': 0, 'production': 0, 'exact': 2, 'license': 'open data'}))
                else:
                    print('M_LKD.changePipeSegments: something wrong here too')
            else:
                print('M_LKD.changePipeSegments: something wrong here')
        fid.close()

        # Removing the pipe segments that were flagged for deletion
        Netz.select_byAttrib(['PipeSegments'], 'id', '-9999', '!=')

    return Netz
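def _example_nodeChangeFile(FileName='example_NodeChanges.csv'):
    """Illustrative sketch, not part of the original module: writes a minimal CSV
    file in the layout changePipeSegments expects -- one header line, ';'-separated,
    with columns pipe id, correct node, wrong node, new node ('None' drops the pipe),
    lat, long, country code. File name, column captions and values are placeholders."""
    with open(FileName, 'w', encoding='utf-8') as fid:
        fid.write('pipe_id;node_correct;node_wrong;node_new;lat;long;country_code\n')
        fid.write('P_123;N_1;N_2;N_2_new;52.52;13.40;DE\n')
        fid.write('P_456;N_7;N_8;None;0.0;0.0;DE\n')
    return FileName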
def join_Component_Meta(Elemente, Meta_Elemente, Meta_Namen, Meta_Typen, Method_Name):
    """ Function to join elements (**Elemente**) with meta data of elements **Meta_Elemente**.

    \n.. comments:
    Input:
        Elemente:       Gas network elements (topological information)
        Meta_Elemente:  Information from meta data for the components
        Meta_Namen:     Variable names of the Meta_Elemente
        Meta_Typen:     List of strings indicating the type of data (e.g. text, real)
        Method_Name:    List of strings, containing an indicator if a column is to be stored in the param dict
    Return:
        Elemente:       Gas network elements, linked with the meta data.
    """
    # Initialization of variables
    Meta_comp_ids = M_Helfer.get_attribFromList(Meta_Elemente, 'comp_id')
    countEle = 0
    posKeep = []
    posMeta = []

    try:
        for ele in Elemente:
            dieserWert = ele.id
            diserNodeID = ele.node_id
            pos = M_FindPos.find_pos_StringInList(dieserWert, Meta_comp_ids)
            if len(pos) > 0:
                posMeta.append(pos[0])
                posKeep.append(countEle)
                for idx, metName in enumerate(Meta_Namen):
                    if metName != 'comp_id' and metName != 'id':
                        # Check if the column goes into the param dict
                        if len(Method_Name[idx]) > 0:
                            Elemente[countEle].param.update({metName: getattr(Meta_Elemente[pos[0]], metName)})
                            if getattr(Meta_Elemente[pos[0]], metName) is None:
                                Elemente[countEle].param.update({metName: None})
                        # Non param
                        else:
                            setattr(Elemente[countEle], metName, getattr(Meta_Elemente[pos[0]], metName))
                            if getattr(Meta_Elemente[pos[0]], metName) is None:
                                setattr(Elemente[countEle], metName, None)

                # Keep the original id and node_id of the element
                Elemente[countEle].node_id = diserNodeID
                Elemente[countEle].id = dieserWert
            countEle = countEle + 1

        # Keep only the elements for which meta data was found
        Temp = K_Netze.NetComp()
        Temp.Temp = Elemente
        Temp.select_byPos('Temp', posKeep)
        Elemente = Temp.Temp

    except:
        print("ERROR: M_Verknuepfe.join_Component_Meta")
        raise

    return Elemente
def read_PipeLines(NumDataSets=1e+100, RelDirName='Eingabe/InternetDaten/'):
    """ Reading of pipeline information from a CSV file. The number of pipelines to read is
    given with **NumDataSets**, and the location of the relative path folder is **RelDirName**.

    \n.. comments:
    Input:
        NumDataSets:     Maximum number of elements to be read
                         (default = 1e+100)
        RelDirName:      String containing the relative directory name
                         (default = 'Eingabe/InternetDaten/')
    Return:
        PipeLines:       PipeLines component
    """
    # Initialization of variables
    id = []
    name = []
    node_id = []
    meta_id = []
    source_id = []
    PipeLines = []

    dataFolder = Path.cwd()
    filename = dataFolder / RelDirName

    # Opening the file and reading the header lines
    FileName = str(filename / 'Loc_PipePoints.csv')
    if os.path.exists(FileName):
        fid = open(FileName, 'r', encoding="utf-8")
        for ii in list(range(1 + 2)):
            fid.readline()

        # Reading with CSV
        csv_reader = csv.reader(fid, delimiter=";")
        for row in csv_reader:
            id.append(row[0])
            source_id.append(''.join([ID_Add, str(row[0])]))
            name.append(row[1])
            node_id.append(row[2])
            meta_id.append(row[3])

        # Closing the CSV file
        fid.close()

        # Initialization of variables
        countLeitung = 0
        countLine = 0
        MaxNum = len(name)

        while countLine < MaxNum:
            PipeLines.append(K_Component.PipeLines(id=None, name='', node_id=[],
                                                   country_code=None, source_id=[],
                                                   lat=None, long=None))
            dieserLeitungsName = name[countLine]     # pipeline name
            dieserPunktName = node_id[countLine]     # node name
            dieserMet_id = meta_id[countLine]
            dieserid = id[countLine]
            dieserSource_id = source_id[countLine]

            PipeLines[countLeitung].id = dieserid
            PipeLines[countLeitung].name = dieserLeitungsName
            PipeLines[countLeitung].node_id = [dieserPunktName]
            PipeLines[countLeitung].source_id = [dieserSource_id]
            PipeLines[countLeitung].param['meta_id'] = dieserMet_id

            # Build the remaining list of pipeline names and find all further rows of this pipeline
            allLeitungsNames = name[countLine + 1:]
            pos = M_FindPos.find_pos_StringInList(dieserLeitungsName, allLeitungsNames)

            if len(pos) == 1:
                dieserPunktName = node_id[countLine + 1 + pos[0]]
                PipeLines[countLeitung].node_id.append(dieserPunktName)
            elif len(pos) > 1:
                dieserPunktName = node_id[countLine + 1 + pos[len(pos) - 1]]
                pos = pos[0:len(pos) - 1]
                for p in pos:
                    PipeLines[countLeitung].node_id.append(node_id[countLine + 1 + p])
                PipeLines[countLeitung].node_id.append(dieserPunktName)
                pos.append(0)
            else:
                print('Pipeline defective')

            countLeitung = countLeitung + 1
            countLine = countLine + 1 + len(pos)

            # User stop based on NumDataSets
            if countLeitung > NumDataSets:
                return PipeLines

    return PipeLines
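# Illustrative sketch, not part of the original module, of the layout that
# read_PipeLines expects for Loc_PipePoints.csv: three header lines are skipped,
# fields are ';'-separated, and the columns used are id, name, node_id, meta_id.
# The column captions and values below are assumptions derived from the parsing
# code above, not taken from a real input file:
#
#   header line 1
#   header line 2
#   id;name;node_id;meta_id
#   1;Pipeline_A;N_1;M_1
#   2;Pipeline_A;N_2;M_1
#   3;Pipeline_B;N_3;M_2
#
# Rows sharing the same pipeline name are merged into one PipeLines element whose
# node_id list collects the node names of all its rows.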
def schrott():
    """Leftover scratch code ("Schrott" = scrap) for comparing the paths of two
    network graphs; not functional as-is, since all inputs below are initialized
    as empty lists."""
    G_Set_1 = []
    G_Set_2 = []
    length_Set_1 = []
    length_Set_2 = []
    minimum_Diff_Val = []
    edge_id_set_1 = []
    PercDiff = []
    minimum_Diff_Index = []
    nodes_id_Set_1 = []
    nodes_id_Set_2 = []
    nodeID_Set_1_Friends = []
    nodeID_Set_2_Friends = []
    cutoff = []
    dist_matrix_Diff = []

    print('set 1 ', nodeID_Set_1_Friends[minimum_Diff_Index[0]])
    print('set 1 ', nodeID_Set_1_Friends[minimum_Diff_Index[1]])
    print(' ')
    print('set 2 ', nodeID_Set_2_Friends[minimum_Diff_Index[0]])
    print('set 2 ', nodeID_Set_2_Friends[minimum_Diff_Index[1]])
    print(' ')

    minimum_Diff_Index = np.unravel_index(np.argmin(dist_matrix_Diff, axis=None), dist_matrix_Diff.shape)
    print('set 1 ', nodeID_Set_1_Friends[minimum_Diff_Index[0]])
    print('set 1 ', nodeID_Set_1_Friends[minimum_Diff_Index[1]])
    print(' ')
    print('set 2 ', nodeID_Set_2_Friends[minimum_Diff_Index[0]])
    print('set 2 ', nodeID_Set_2_Friends[minimum_Diff_Index[1]])

    edge_distMatrix = makeRelDistMatrix(length_Set_1, length_Set_2)
    minimum_Diff_Index = np.unravel_index(np.argmin(edge_distMatrix, axis=None), edge_distMatrix.shape)
    minimum_Diff_Val = edge_distMatrix.min()
    print(' ')

    figNum = 1
    fig = plt.figure(figNum)
    NX.draw(G_Set_1, NX.get_node_attributes(G_Set_1, 'pos'), node_size=7)
    fig.show()
    figNum = figNum + 1

    # Jumping into the while loop, as long as the relative distance value is smaller than the threshold
    while len(length_Set_1) > 0 and len(length_Set_2) > 0 and minimum_Diff_Val < PercDiff:
        print('While Loop')
        print(edge_id_set_1)

        # 1) Removing edges from network 1
        for u, v, key, data in list(G_Set_1.edges(None, data=True, keys=True)):
            if data['id'] in edge_id_set_1[minimum_Diff_Index[0]]:
                print('removing edge: ', data['id'])
                # Finding the node set
                pos = M_FindPos.find_pos_StringInTouple(data['id'][0], edge_id_set_1[minimum_Diff_Index[0]])
                nodes = nodes_id_Set_1[minimum_Diff_Index[0]][pos[0]]
                # Dissecting the nodes string
                pos = M_FindPos.find_pos_CharInStr(',', nodes)
                node1 = nodes[:pos[0]]
                node2 = nodes[pos[0] + 1:]
                # Removing the edge
                G_Set_1.remove_edge(node1, node2, key=key)

                # Now moving the node position if the same node is in both data sets
                if node1 in nodeID_Set_1_Friends:
                    pos = M_FindPos.find_pos_StringInList(node1, nodeID_Set_1_Friends)
                    lat = G_Set_2.node[nodeID_Set_2_Friends[pos[0]]]['pos'][1]
                    long = G_Set_2.node[nodeID_Set_2_Friends[pos[0]]]['pos'][0]
                    G_Set_1.node[node1]['pos'] = (long, lat)
                if node2 in nodeID_Set_1_Friends:
                    pos = M_FindPos.find_pos_StringInList(node2, nodeID_Set_1_Friends)
                    lat = G_Set_2.node[nodeID_Set_2_Friends[pos[0]]]['pos'][1]
                    long = G_Set_2.node[nodeID_Set_2_Friends[pos[0]]]['pos'][0]
                    G_Set_1.node[node2]['pos'] = (long, lat)

        # Plotting the resulting network
        fig = plt.figure(figNum)
        NX.draw(G_Set_1, NX.get_node_attributes(G_Set_1, 'pos'), node_size=7)
        fig.show()
        figNum = figNum + 1

        # 2) Re-running get_PathInfo
        print('execution of get_PathInfo in while loop')
        lat_Set_1, long_Set_1, length_Set_1, edge_id_set_1, nodes_id_Set_1 = get_PathInfo(
            G_Set_1, nodeID_Set_1_Friends[minimum_Diff_Index[0]],
            nodeID_Set_1_Friends[minimum_Diff_Index[1]], cutoff=cutoff)
        lat_Set_2, long_Set_2, length_Set_2, edge_id_set_2, nodes_id_Set_2 = get_PathInfo(
            G_Set_2, nodeID_Set_2_Friends[minimum_Diff_Index[0]],
            nodeID_Set_2_Friends[minimum_Diff_Index[1]], cutoff=cutoff)

        if len(lat_Set_2) > 0:
            edge_distMatrix = makeRelDistMatrix(length_Set_1, length_Set_2)
            minimum_Diff_Index = np.unravel_index(np.argmin(edge_distMatrix, axis=None), edge_distMatrix.shape)
            minimum_Diff_Val = edge_distMatrix.min()
        else:
            minimum_Diff_Val = PercDiff * 2
    ###########################################################################
    # Joining the two networks
    ###########################################################################
    G_Set_Sum = NX.compose(G_Set_1, G_Set_2)

    # Removing nodes of degree zero
    deg = G_Set_Sum.degree(G_Set_Sum)
    for n in list(G_Set_Sum.nodes()):
        if deg[n] == 0:
            G_Set_Sum.remove_node(n)

    # Converting the graph into a network
    G_Netz = M_Graph.Graph2Netz(G_Set_Sum)

    print('leaving function')

    return G_Netz, G_Set_Sum
def read(RelDirName='Eingabe/CSV/', NumDataSets=1e+100, skiprows=[]):
    """Description:
    ------------
        Reads data from folder RelDirName into Grid
        Grid = instance of Netz class

    Input Parameter:
    ----------------
        RelDirName      string containing path name of data location
                        [default: 'Eingabe/CSV/']
        NumDataSets     maximum number of elements to be read for each component
                        [default: 1e+100]
        skiprows        rows to skip when reading the CSV files
                        [default: []]

    Return Parameters:
    ------------------
        Grid            instance of class K_Netze.Netz, populated with data from CSV files
    """
    # Dir name stuff
    DirName = Path.cwd() / RelDirName
    Grid = K_Netze.NetComp()
    FileList = K_Netze.NetComp().CompLabels()

    for key in FileList:
        count = 0
        filename = 'Gas_' + key + '.csv'
        CSV_File = str(DirName / filename)

        # Z is empty if the file does not exist
        Z = CSV_2_list(CSV_File, skiprows=skiprows)
        if len(Z) > 0:
            for entry in Z:
                Keys = list(entry.keys())
                Vals = list(entry.values())

                # Convert 'None' strings and NaN values to None, and numeric strings to float
                for ii in range(len(Vals)):
                    if Vals[ii] == 'None':
                        Vals[ii] = None
                    elif type(Vals[ii]) is float:
                        if math.isnan(Vals[ii]):
                            Vals[ii] = None
                    else:
                        try:
                            Vals[ii] = float(Vals[ii])
                        except:
                            pass

                # Positions of the standard attributes in the CSV columns
                pos_Id = M_FindPos.find_pos_StringInList('id', Keys)
                pos_Name = M_FindPos.find_pos_StringInList('name', Keys)
                pos_SId = M_FindPos.find_pos_StringInList('source_id', Keys)
                pos_Node = M_FindPos.find_pos_StringInList('node_id', Keys)
                pos_CC = M_FindPos.find_pos_StringInList('country_code', Keys)
                pos_lat = M_FindPos.find_pos_StringInList('lat', Keys)
                pos_long = M_FindPos.find_pos_StringInList('long', Keys)
                pos_comm = M_FindPos.find_pos_StringInList('comment', Keys)
                pos_para = M_FindPos.find_pos_StringInList('param', Keys)
                pos_meth = M_FindPos.find_pos_StringInList('method', Keys)
                pos_unce = M_FindPos.find_pos_StringInList('uncertainty', Keys)
                pos_tags = M_FindPos.find_pos_StringInList('tags', Keys)

                del entry['id']
                del entry['name']
                del entry['source_id']
                del entry['node_id']
                del entry['country_code']
                del entry['lat']
                del entry['long']
                del entry['comment']
                del entry['param']
                del entry['method']
                del entry['uncertainty']
                del entry['tags']

                id = Vals[pos_Id[0]]
                name = Vals[pos_Name[0]]
                source_id = makeList(Vals[pos_SId[0]])
                node_id = makeList(Vals[pos_Node[0]])
                country_code = makeList(Vals[pos_CC[0]])
                lat = Vals[pos_lat[0]]
                if isinstance(lat, str):
                    lat = eval(lat)
                long = Vals[pos_long[0]]
                if isinstance(long, str):
                    long = eval(long)
                comment = Vals[pos_comm[0]]
                param = eval(Vals[pos_para[0]].replace(': nan,', ': float(\'nan\'),'))
                method = eval(Vals[pos_meth[0]].replace(': nan,', ': float(\'nan\'),'))
                uncertainty = eval(Vals[pos_unce[0]].replace(': nan,', ': float(\'nan\'),'))
                tags = eval(Vals[pos_tags[0]].replace(': nan,', ': float(\'nan\'),'))

                Grid.__dict__[key].append(K_Component.__dict__[key](
                    id=id, name=name, source_id=source_id, node_id=node_id,
                    country_code=country_code, param=param, lat=lat, long=long,
                    method=method, uncertainty=uncertainty, tags=tags, comment=comment))

                count = count + 1
                if count >= NumDataSets:
                    break
        else:
            Grid.__dict__[key] = []

    return Grid
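def _example_read():
    """Illustrative usage sketch, not part of the original module: typical call of
    read(). The directory name is the default; 'Nodes' is assumed to be one of the
    component labels returned by K_Netze.NetComp().CompLabels()."""
    Grid = read(RelDirName='Eingabe/CSV/', NumDataSets=100)
    print(len(Grid.Nodes))   # number of nodes read (at most 100 per component)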