def gen_component(dataType, CompIn):
    """ Generates a netz component from existing components of this netz,
    e.g. generation of a nodes list from Segments. The component name to be
    generated is supplied as string **dataType**; the only option currently
    implemented is *Nodes*. Data is supplied via component **CompIn**.

    \n.. comments:
    Input:
        dataType:        string containing name of component to be created, e.g. 'Nodes'
        CompIn:          netz class instance
    Return:
        ReturnComponent: component list.
    """
    Nodes = []
    if 'Nodes' == dataType:
        for comp in CompIn:
            Nodes.append(K_Component.Nodes(id=comp.id,
                                           node_id=comp.node_id,
                                           name=comp.name,
                                           source_id=comp.source_id,
                                           country_code=comp.country_code,
                                           lat=None,
                                           long=None))
    return Nodes
def getMatch_LatLong_Threshold(comp_0, comp_1, methodVal=50000):
    """Gets the separation between two points and checks if the distance is
    smaller than **methodVal**. Returns 100 if true, otherwise 0.
    """
    # Initialization
    RetVal = 0
    # Netz_0 is empty
    if comp_0 == '':
        pass
    # Netz_1 is empty
    elif comp_1 == '':
        pass
    elif comp_0.long is None:
        pass
    elif comp_1.long is None:
        pass
    # Both Netze contain components
    else:
        # Creation of LatLong "vector" from component latlong
        latlong_Netz_0 = K_Comp.PolyLine(lat=[comp_0.lat], long=[comp_0.long])
        thisLatLong = K_Comp.PolyLine(lat=comp_1.lat, long=comp_1.long)
        [pos, minVal] = M_FindPos.find_pos_closestLatLongInList(thisLatLong, latlong_Netz_0)
        if math.isnan(minVal):
            RetVal = 0
        elif minVal <= methodVal:
            RetVal = 100
        else:
            RetVal = 0
    # Testing if nan; if so, set to zero
    if math.isnan(RetVal):
        RetVal = 0
    return RetVal
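# The threshold test above leans on M_FindPos.find_pos_closestLatLongInList,
# which is assumed to return the index of, and the great-circle distance in
# metres to, the closest point (the default methodVal = 50000 then reads as
# 50 km). Below is a minimal, self-contained sketch of such a distance using
# the haversine formula; the real helper may be implemented differently.
import math

def haversine_m(lat0, long0, lat1, long1, R_earth_m=6371000.0):
    """Great-circle distance in metres between two points given in degrees."""
    phi0 = math.radians(lat0)
    phi1 = math.radians(lat1)
    dphi = math.radians(lat1 - lat0)
    dlam = math.radians(long1 - long0)
    a = math.sin(dphi / 2)**2 + math.cos(phi0) * math.cos(phi1) * math.sin(dlam / 2)**2
    return 2 * R_earth_m * math.asin(math.sqrt(a))

# Two points roughly 1 km apart score 100 with the default threshold:
# haversine_m(50.0, 8.0, 50.009, 8.0) -> ~1000 m <= 50000 -> RetVal = 100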
def get_latlongPairs_Points(Components):
    """Returns lat/long values from **Components**, in format latlong.lat and
    latlong.long.

    \n.. comments:
    Input:
        Components:      list of elements of a single component
    Return:
        latlong:         latlong.lat and latlong.long are lists of lat and long values
    """
    latlong = K_Component.PolyLine(lat=[], long=[])
    for comp in Components:
        latlong.lat.append(comp.lat)
        latlong.long.append(comp.long)
    return latlong
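# Hedged usage sketch for get_latlongPairs_Points: any objects exposing
# .lat/.long attributes will do. _DemoNode is hypothetical; the real
# K_Component classes carry many more attributes.
class _DemoNode:
    def __init__(self, lat, long):
        self.lat = lat
        self.long = long

# latlong = get_latlongPairs_Points([_DemoNode(50.1, 8.6), _DemoNode(52.5, 13.4)])
# latlong.lat  -> [50.1, 52.5]
# latlong.long -> [8.6, 13.4]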
def gen_component(dataType, CompIn):
    """ Generates a netz component from existing components of this netz,
    e.g. generation of a nodes list from Storages. The component name to be
    generated is supplied as string **dataType**; the only option currently
    implemented is *Nodes*. Data is supplied via component **CompIn**.

    \n.. comments:
    Input:
        dataType:        string containing name of component to be created, e.g. 'Nodes'
        CompIn:          netz class instance
    Return:
        ReturnComponent: component list.
    """
    Nodes = []
    if 'Nodes' == dataType:
        for comp in CompIn:
            Nodes.append(K_Component.Nodes(
                id=comp.id,
                node_id=comp.node_id,
                name=comp.name,
                country_code=comp.country_code,
                lat=comp.lat,
                long=comp.long,
                source_id=comp.source_id,
                param={'name_short': comp.param['name_short'],
                       'operator_name': comp.param['operator_name'],
                       'status': comp.param['status'],
                       'start_year': comp.param['start_year'],
                       'max_cap_store2pipe_GWh_per_d': comp.param['max_cap_store2pipe_GWh_per_d'],
                       'max_cap_pipe2store_GWh_per_d': comp.param['max_cap_pipe2store_GWh_per_d'],
                       'max_workingGas_TWh': comp.param['max_workingGas_TWh']}))
    return Nodes
def read_component(ComponentName, NumDataSets=1e+100, CheckLatLong=0, RelDirName=None):
    """ Reading of point components from CSV files. The component to read is
    supplied as string **ComponentName**, e.g. 'Compressors' or 'Nodes', with
    **CheckLatLong** being a boolean indicating whether lat/long values shall
    be checked. The relative path of the CSV files is given through
    **RelDirName**.

    .. comments:
    Input:
        ComponentName:   string of component name
        NumDataSets:     max number of values to be read in (default = 1e+100)
        CheckLatLong:    boolean (1/0) if value of lat/long shall be checked (default = 0)
        RelDirName:      string of relative file location (default = None)
    Return:
        Punkte:          information on the Nodes elements
    """
    # Initialization of variables
    Punkte = []
    countLine = 0
    if ComponentName == "BorderPoints":
        DateiName = str(RelDirName / 'Loc_BorderPoints.csv')
    elif ComponentName == "Compressors":
        DateiName = str(RelDirName / 'Loc_Compressors.csv')
    elif ComponentName == "Consumers":
        DateiName = str(RelDirName / 'Loc_Consumers.csv')
    elif ComponentName == "EntryPoints":
        DateiName = str(RelDirName / 'Loc_EntryPoints.csv')
    elif ComponentName == "InterConnectionPoints":
        DateiName = str(RelDirName / 'Loc_InterConnectionPoints.csv')
    elif ComponentName == "LNGs":
        DateiName = str(RelDirName / 'Loc_LNGs.csv')
    elif ComponentName == "Nodes":
        DateiName = str(RelDirName / 'Loc_Nodes.csv')
    elif ComponentName == "Storages":
        DateiName = str(RelDirName / 'Loc_Storages.csv')
    else:
        print('ERROR: M_Internet.read_component: type ' + ComponentName + ' not defined')
        raise ValueError('unknown component name: ' + ComponentName)

    if not os.path.exists(DateiName):
        print(DateiName + ' does not exist\n')
    else:
        # Opening file
        fid = open(DateiName, 'r', encoding="utf-8", errors="ignore")
        # Skipping the three header lines
        for ii in range(3):
            fid.readline()
        # Reading with CSV
        csv_reader = csv.reader(fid, delimiter=";")
        try:
            if 'Nodes' in ComponentName:
                for row in csv_reader:
                    id = row[0]
                    source_id = [''.join([ID_Add, str(id)])]
                    name = id
                    comment = row[1]
                    country = row[2]
                    lat = row[3]
                    long = row[4]
                    exact = row[5]
                    node_id = [id]
                    Punkte.append(K_Component.Nodes(id=id,
                                                    name=name,
                                                    node_id=node_id,
                                                    source_id=source_id,
                                                    country_code=country,
                                                    lat=float(lat),
                                                    long=float(long),
                                                    comment=comment,
                                                    param={'exact': exact}))
                    countLine = countLine + 1
                    if countLine > NumDataSets:
                        fid.close()
                        return Punkte
            else:
                for row in csv_reader:
                    id = row[0]
                    source_id = [''.join([ID_Add, str(id)])]
                    name = id
                    comment = row[1]
                    node_id = [row[2]]
                    country_code = None
                    Punkte.append(K_Component.Nodes(id=id,
                                                    name=name,
                                                    node_id=node_id,
                                                    source_id=source_id,
                                                    country_code=country_code,
                                                    lat=float('nan'),
                                                    long=float('nan'),
                                                    comment=comment,
                                                    param={}))
                    countLine = countLine + 1
                    if countLine > NumDataSets:
                        fid.close()
                        return Punkte
        except Exception as inst:
            print(countLine)
            print(type(inst))   # the exception instance
            print(inst.args)    # arguments stored in .args
            print(inst)
            raise
        # Closing the CSV file
        fid.close()
    return Punkte
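# A hedged sketch of the Loc_Nodes.csv layout the Nodes branch above expects:
# three header lines are skipped, then semicolon-separated rows are read as
# id;comment;country;lat;long;exact. Header text and sample rows are made up.
#
#   <header line 1>
#   <header line 2>
#   id;comment;country;lat;long;exact
#   NO_0001;border point;DE;51.21;6.77;1
#   NO_0002;;NL;52.37;4.89;0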
def read(NumDataSets=100000, requeYear='', licenseType='', GasType='H',
         RelDirName='Eingabe/InternetDaten/'):
    """ Reading in Internet data sets from Internet specific CSV files, with
    **NumDataSets** maximum number of records to read, and **requeYear** for
    which year to get data.

    \n.. comments:
    Input:
        NumDataSets:     (Optional = 100000) number of data sets
        requeYear:       (Optional = '2010') string containing year [####] for which data is to be retrieved
        licenseType:     (Optional = '') string containing the kind of license that the data will be selected on
        GasType:         (Optional = 'H') a character indicating either H or L gas
        RelDirName:      string containing the relative dir name from which the Internet data can be loaded
    Return:
        Ret_Data:        element of K_Netze.NetComp() class, being the SciGRID_gas component data set class
    """
    Filter = {"year": requeYear, "license": licenseType, "GasType": GasType}
    Ret_Data = K_Netze.NetComp()
    MD = K_Component.MetaData()
    RelDirName = Path(RelDirName)

    # Reading raw data
    Ret_Data.Nodes = read_component("Nodes", NumDataSets, 0, RelDirName=RelDirName)
    Ret_Data.BorderPoints = read_component("BorderPoints", NumDataSets, 0, RelDirName=RelDirName)
    Ret_Data.Compressors = read_component("Compressors", NumDataSets, 0, RelDirName=RelDirName)
    #Ret_Data.Consumers = read_component("Consumers", NumDataSets, 0, RelDirName=RelDirName)
    Ret_Data.EntryPoints = read_component("EntryPoints", NumDataSets, 0, RelDirName=RelDirName)
    Ret_Data.InterConnectionPoints = read_component("InterConnectionPoints", NumDataSets, 0, RelDirName=RelDirName)
    Ret_Data.LNGs = read_component("LNGs", NumDataSets, 0, RelDirName=RelDirName)
    Ret_Data.Storages = read_component("Storages", NumDataSets, 0, RelDirName=RelDirName)
    Ret_Data.PipeLines = read_PipeLines(NumDataSets, RelDirName=RelDirName)

    # Meta data
    [MD.BorderPoints_Meta, MD.BorderPoints_Meta_Type, MD.BorderPoints_Meta_Name,
     MD.BorderPoints_methodName] = read_Meta("BorderPoints", RelDirName=RelDirName)
    [MD.Compressors_Meta, MD.Compressors_Meta_Type, MD.Compressors_Meta_Name,
     MD.Compressors_methodName] = read_Meta("Compressors", RelDirName=RelDirName)
    [MD.EntryPoints_Meta, MD.EntryPoints_Type, MD.EntryPoints_Meta_Name,
     MD.EntryPoints_methodName] = read_Meta("EntryPoints", RelDirName=RelDirName)
    [MD.LNGs_Meta, MD.LNGs_Meta_Type, MD.LNGs_Meta_Name,
     MD.LNGs_methodName] = read_Meta("LNGs", RelDirName=RelDirName)
    [MD.PipeLines_Meta, MD.PipeLines_Meta_Type, MD.PipeLines_Meta_Name,
     MD.PipePoints_methodName] = read_Meta("PipePoints", RelDirName=RelDirName)
    [MD.Storages_Meta, MD.Storages_Meta_Type, MD.Storages_Meta_Name,
     MD.Storages_methodName] = read_Meta("Storages", RelDirName=RelDirName)
    [MD.InterConnectionPoints_Meta, MD.InterConnectionPoints_Meta_Type,
     MD.InterConnectionPoints_Meta_Name,
     MD.InterConnectionPoints_methodName] = read_Meta("InterConnectionPoints", RelDirName=RelDirName)

    # Filtering of data
    MD.BorderPoints_Meta = M_Filter.filter_Daten(Filter, MD.BorderPoints_Meta)
    MD.Compressors_Meta = M_Filter.filter_Daten(Filter, MD.Compressors_Meta)
    MD.EntryPoints_Meta = M_Filter.filter_Daten(Filter, MD.EntryPoints_Meta)
    MD.InterConnectionPoints_Meta = M_Filter.filter_Daten(Filter, MD.InterConnectionPoints_Meta)
    MD.LNGs_Meta = M_Filter.filter_Daten(Filter, MD.LNGs_Meta)
    MD.PipeLines_Meta = M_Filter.filter_Daten(Filter, MD.PipeLines_Meta)
    MD.Storages_Meta = M_Filter.filter_Daten(Filter, MD.Storages_Meta)

    # Joining components with their meta data
    Ret_Data.BorderPoints = join_Component_Meta(Ret_Data.BorderPoints, MD.BorderPoints_Meta,
                                                MD.BorderPoints_Meta_Name, MD.BorderPoints_Meta_Type,
                                                MD.BorderPoints_methodName)
    Ret_Data.Compressors = join_Component_Meta(Ret_Data.Compressors, MD.Compressors_Meta,
                                               MD.Compressors_Meta_Name, MD.Compressors_Meta_Type,
                                               MD.Compressors_methodName)
    Ret_Data.EntryPoints = join_Component_Meta(Ret_Data.EntryPoints, MD.EntryPoints_Meta,
                                               MD.EntryPoints_Meta_Name, MD.EntryPoints_Type,
                                               MD.EntryPoints_methodName)
    Ret_Data.InterConnectionPoints = join_Component_Meta(Ret_Data.InterConnectionPoints,
                                                         MD.InterConnectionPoints_Meta,
                                                         MD.InterConnectionPoints_Meta_Name,
                                                         MD.InterConnectionPoints_Meta_Type,
                                                         MD.InterConnectionPoints_methodName)
    Ret_Data.LNGs = join_Component_Meta(Ret_Data.LNGs, MD.LNGs_Meta, MD.LNGs_Meta_Name,
                                        MD.LNGs_Meta_Type, MD.LNGs_methodName)
    Ret_Data.Storages = join_Component_Meta(Ret_Data.Storages, MD.Storages_Meta,
                                            MD.Storages_Meta_Name, MD.Storages_Meta_Type,
                                            MD.Storages_methodName)
    Ret_Data.PipeLines = join_PipeLine_Meta(Ret_Data.PipeLines, MD.PipeLines_Meta,
                                            MD.PipeLines_Meta_Name, MD.PipeLines_Meta_Type,
                                            MD.PipePoints_methodName)

    # Creation of PipeSegments and PipePoints
    Ret_Data.PipeLines2PipeSegments()
    Ret_Data.PipeSegments2PipePoints()

    # Unit conversion
    Ret_Data.MoveUnits('LNGs', 'storage_LNG_Mt', 'max_workingGas_M_m3', replace=True)
    Ret_Data.MoveUnits('LNGs', 'max_cap_store2pipe_M_m3_per_a', 'max_cap_store2pipe_M_m3_per_d', replace=True)
    Ret_Data.MoveUnits('Compressors', 'max_cap_M_m3_per_h', 'max_cap_M_m3_per_d', replace=True)
    Ret_Data.MoveUnits('Storages', 'max_cap_pipe2store_GWh_per_d', 'max_cap_pipe2store_M_m3_per_d', replace=True)
    Ret_Data.MoveUnits('Storages', 'max_cap_store2pipe_GWh_per_d', 'max_cap_store2pipe_M_m3_per_d', replace=True)
    Ret_Data.MoveUnits('Storages', 'max_workingGas_TWh', 'max_workingGas_M_m3', replace=True)

    # Removing attributes
    Ret_Data.removeAttrib('PipeSegments', ['meta_id'])
    Ret_Data.removeAttrib('LNGs', ['storage_LNG_Mt', 'max_cap_store2pipe_M_m3_per_a'])
    Ret_Data.removeAttrib('Compressors', ['max_cap_M_m3_per_h'])
    Ret_Data.removeAttrib('Storages', ['max_cap_pipe2store_GWh_per_d',
                                       'max_cap_store2pipe_GWh_per_d',
                                       'max_workingGas_TWh'])

    # Harmonizing storage type labels (German, partly garbled, source values)
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Leeres Gas Feld', 'Depleted Field')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Depleted gas field', 'Depleted Field')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Leeres ?l Feld', 'Depleted Field')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Leeres ?l/Gas Feld', 'Depleted Field')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Leeres Feld', 'Depleted Field')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Salz Kaverne', 'Salt cavern')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Stein Kaverne', 'Rock Cavern')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Leeres ?l Feld mit Gas Haube', 'Depleted Field')

    # Adding lat/long
    Ret_Data.add_latLong()

    # Removing unwanted components
    Ret_Data.PipeLines = []
    Ret_Data.PipePoints = []

    # Assuring that all elements of a component have the same attributes, and
    # keeping track of the origin of the data
    Ret_Data.setup_SameAttribs([], None)

    # Adding further essential attributes
    Ret_Data.fill_length('PipeSegments')
    Ret_Data.make_Attrib(['PipeSegments'], 'lat', 'lat_mean', 'mean')
    Ret_Data.make_Attrib(['PipeSegments'], 'long', 'long_mean', 'mean')

    # Replacing any '' with None, and string booleans with 1/0
    Ret_Data.replace_attrib(compNames=[], attribNames=[], attribValIn='', attribValOut=None)
    Ret_Data.replace_attrib(compNames=[], attribNames=[], attribValIn='True', attribValOut=1)
    Ret_Data.replace_attrib(compNames=[], attribNames=[], attribValIn='False', attribValOut=0)

    # Cleaning up node_id and nodes
    Ret_Data.merge_Nodes_Comps(compNames=['LNGs', 'Compressors', 'Storages', 'PipeSegments',
                                          'EntryPoints', 'InterConnectionPoints', 'BorderPoints',
                                          'Nodes'])
    Ret_Data.remove_unUsedNodes()

    Ret_Data.SourceName = ['InterNet']
    return Ret_Data
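# Hedged usage sketch for read(): the directory layout under
# 'Eingabe/InternetDaten/' is assumed to match the CSV readers above, and the
# printed attributes follow the NetComp usage in read() itself.
#
#   Netz = read(NumDataSets=100000, requeYear='2019',
#               RelDirName='Eingabe/InternetDaten/')
#   print(len(Netz.Nodes), 'nodes;', len(Netz.PipeSegments), 'pipe segments')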
def read_component(DataType='Storages', NumDataSets=100000, requeYear=['2000']):
    """ Reading in IGU storage data sets from the IGU web pages, with
    **NumDataSets** maximum number of records to read, and **requeYear** for
    which year to get data.

    \n.. comments:
    Input:
        DataType:        string containing the data type to read; only 'Storages' is implemented
                         (default = 'Storages')
        NumDataSets:     number of data sets (default = 100000)
        requeYear:       list of strings containing years [####] for which data is to be retrieved
                         (default = ['2000'])
    Return:
        ReturnComponent: data structure of IGU data.
    """
    ReturnComponent = []
    if 'Storages' in DataType:
        # Initialization
        webCall_1 = 'http://members.igu.org/html/wgc2003/WGC_pdffiles/data/Europe/att/UGS_'
        # Creation of a pool manager
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
        # Reading the data for each storage off the web page
        maxSets = min([169, NumDataSets])
        for nn in range(maxSets):
            time.sleep(0.001 + random.randint(1, 100) / 100000)
            # URL of this storage
            thisURL = webCall_1 + str(nn) + '.html'
            # Get the data
            URLData = http.request('GET', thisURL)
            # Convert the data into dict
            try:
                if ('Application Error' not in str(URLData.data)
                        and 'Appliance Error' not in str(URLData.data)
                        and '404 Not Found</title>' not in str(URLData.data)):
                    soup = BeautifulSoup(URLData.data, 'html.parser')
                    # The table holds label/value pairs: incrementing ii by
                    # 0.5 per <td> makes it hit integer values on the value cells.
                    ii = -0.5
                    for td in soup.find_all('td'):
                        if ii == 0:
                            id = td.font.string.replace('\n', '').strip()
                            source_id = ['IGU_' + str(id)]
                            node_id = [id]  # is this correct??
                        elif ii == 1:
                            name = replaceString(td.font.string.replace('\n', '').strip())
                            id = name
                            node_id = [id]
                        elif ii == 2:
                            is_abandoned = td.font.string.replace('\n', '').strip()
                            is_abandoned = ('in operation' != is_abandoned)
                        elif ii == 3:
                            # e.g. Germany
                            country_code = M_Helfer.countryName2TwoLetter(
                                td.font.string.replace('\n', '').strip())
                        elif ii == 4:
                            # e.g. Oil/Gasfield
                            store_type = td.font.string.replace('\n', '').strip()
                        elif ii == 5:
                            # e.g. BEB
                            operator_name = td.font.string.replace('\n', '').strip()
                        elif ii == 6:
                            # start year, e.g. 2001
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if '/' in wert:
                                wert = M_Helfer.string2floats(wert.replace('/', ' '))
                                start_year = wert[0]
                            elif 'Jan. ' in wert:
                                wert = wert.replace('Jan. ', '')
                                start_year = float(wert)
                            elif len(wert) > 0:
                                start_year = float(wert)
                            else:
                                start_year = None
                        elif ii == 7:
                            # working gas volume [mill m3], e.g. 2025
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_workingGas_M_m3 = float(wert)
                            else:
                                max_workingGas_M_m3 = None
                        elif ii == 8:
                            # cushion gas volume [mill m3], e.g. 2358
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_cushionGas_M_m3 = float(wert)
                            else:
                                max_cushionGas_M_m3 = None
                        elif ii == 9:
                            # peak withdrawal capacity [10^3 m3/h], e.g. 840
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_cap_store2pipe_M_m3_per_d = float(wert) / 1000 * 24
                            else:
                                max_cap_store2pipe_M_m3_per_d = None
                        elif ii == 10:
                            # injection capacity [10^3 m3/h], e.g. 810
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_cap_pipe2store_M_m3_per_d = float(wert) / 1000 * 24
                            else:
                                max_cap_pipe2store_M_m3_per_d = None
                        elif ii == 11:
                            # storage formation, e.g. Solling sandstone middle Bunter
                            wert = td.font.string.replace('\n', '').strip()
                            if wert == '---' or wert == '':
                                storage_formation = None
                            else:
                                storage_formation = wert
                        elif ii == 12:
                            # depth top structure, resp. cavern roof [m], e.g. 2650
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                structure_depth_m = float(wert)
                            else:
                                structure_depth_m = None
                        elif ii == 13:
                            # min storage pressure [BHP bar], e.g. 90
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                min_storage_pressure_bphBar = float(wert)
                            else:
                                min_storage_pressure_bphBar = None
                        elif ii == 14:
                            # max allowable storage pressure [BHP bar], e.g. 460
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_storage_pressure_bphBar = float(wert)
                            else:
                                max_storage_pressure_bphBar = None
                        elif ii == 15:
                            # net thickness [m], e.g. 22; ranges are averaged
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if wert == '---':
                                net_thickness_m = None
                            elif '..' in wert:
                                wert = M_Helfer.string2floats(wert.replace('..', ' '))
                                net_thickness_m = sum(wert) / float(len(wert))
                            elif '/' in wert:
                                wert = M_Helfer.string2floats(wert.replace('/', ' '))
                                net_thickness_m = sum(wert) / float(len(wert))
                            elif ' - ' in wert:
                                wert = M_Helfer.string2floats(wert.replace(' - ', ' '))
                                net_thickness_m = sum(wert) / float(len(wert))
                            elif '-' in wert:
                                wert = M_Helfer.string2floats(wert.replace('-', ' '))
                                net_thickness_m = sum(wert) / float(len(wert))
                            elif len(wert) > 0:
                                net_thickness_m = float(wert)
                            else:
                                net_thickness_m = None
                        elif ii == 16:
                            # porosity [%], e.g. 22; ranges are averaged
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if wert == '---' or len(wert) == 0:
                                porosity_perc = None
                            elif '(' in wert and ')' in wert:
                                wert = M_Helfer.string2floats(
                                    wert.replace('(', '').replace(')', '').replace(' - ', ' '))
                                porosity_perc = sum(wert) / float(len(wert))
                            elif ' - ' in wert:
                                wert = M_Helfer.string2floats(wert.replace(' - ', ' '))
                                porosity_perc = sum(wert) / float(len(wert))
                            elif '/' in wert:
                                wert = M_Helfer.string2floats(wert.replace('/', ' '))
                                porosity_perc = sum(wert) / float(len(wert))
                            elif ' -' in wert:
                                wert = wert.replace(' -', ' ')
                                if len(wert) > 1:
                                    wert = M_Helfer.string2floats(wert)
                                    porosity_perc = sum(wert) / float(len(wert))
                                else:
                                    porosity_perc = None
                            elif '-' in wert:
                                wert = wert.replace('-', ' ')
                                if len(wert) > 1:
                                    wert = M_Helfer.string2floats(wert)
                                    porosity_perc = sum(wert) / float(len(wert))
                                else:
                                    porosity_perc = None
                            else:
                                porosity_perc = wert
                        elif ii == 17:
                            # permeability [mD], e.g. 10 - 1000 (500); ranges are averaged
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip().replace(' mD', '')
                            if wert == '---' or len(wert) == 0:
                                permeability_mD = None
                            elif '(' in wert and ')' in wert:
                                wert = M_Helfer.string2floats(
                                    wert.replace('(', '').replace(')', '').replace(' - ', ' '))
                                permeability_mD = sum(wert) / float(len(wert))
                            elif ' - ' in wert:
                                wert = M_Helfer.string2floats(wert.replace(' - ', ' '))
                                permeability_mD = sum(wert) / float(len(wert))
                            elif '-' in wert:
                                wert = wert.replace('-', ' ')
                                if len(wert) > 1:
                                    wert = M_Helfer.string2floats(wert)
                                    permeability_mD = sum(wert) / float(len(wert))
                                else:
                                    permeability_mD = None
                            elif '/' in wert:
                                wert = M_Helfer.string2floats(wert.replace('/', ' '))
                                permeability_mD = sum(wert) / float(len(wert))
                            else:
                                permeability_mD = wert
                        elif ii == 18:
                            # number of storage wells, resp. caverns, e.g. 15
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                num_storage_wells = int(wert)
                            else:
                                num_storage_wells = None
                        elif ii == 19:
                            # installed compressor power [MW]
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_power_MW = float(wert)
                            else:
                                max_power_MW = None
                        ii = ii + 0.5
                    if len(country_code) > 0 and is_abandoned == False:
                        # Creation of component Storage
                        ReturnComponent.append(K_Component.Storages(
                            id=id,
                            name=name,
                            node_id=node_id,
                            source_id=source_id,
                            country_code=country_code,
                            lat=None,
                            long=None,
                            param={'store_type': store_type,
                                   'operator_name': operator_name,
                                   'start_year': start_year,
                                   'max_workingGas_M_m3': max_workingGas_M_m3,
                                   'max_cushionGas_M_m3': max_cushionGas_M_m3,
                                   'storage_formation': storage_formation,
                                   'structure_depth_m': structure_depth_m,
                                   'net_thickness_m': net_thickness_m,
                                   'porosity_perc': porosity_perc,
                                   'permeability_mD': permeability_mD,
                                   'num_storage_wells': num_storage_wells,
                                   'max_power_MW': max_power_MW,
                                   'max_cap_store2pipe_M_m3_per_d': max_cap_store2pipe_M_m3_per_d,
                                   'max_cap_pipe2store_M_m3_per_d': max_cap_pipe2store_M_m3_per_d,
                                   'min_storage_pressure_bphBar': min_storage_pressure_bphBar,
                                   'max_storage_pressure_bphBar': max_storage_pressure_bphBar}))
            except:
                print(CC.Warning + 'Warning: M_IGU.read_component: reading URL failed' + CC.End)
    return ReturnComponent
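# A minimal sketch of the half-step counter trick used above: incrementing ii
# by 0.5 per <td> makes it hit integer values on every second cell, so the
# integer comparisons address the value cells of the label/value pairs on the
# IGU pages. Toy markup, same mechanics:
from bs4 import BeautifulSoup

_html = '<table><td><font>Name</font></td><td><font>Rehden</font></td></table>'
_soup = BeautifulSoup(_html, 'html.parser')
ii = -0.5
for td in _soup.find_all('td'):
    if ii == 0:
        print('value cell:', td.font.string)  # -> 'Rehden'
    ii = ii + 0.5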
def read_component(DataType='', NumDataSets=100000, requeYear='', RelDirName=None):
    """ Reading in GSE storage data sets from a CSV file, with **NumDataSets**
    maximum number of records to read, and **requeYear** for which year to get
    data. The relative path of the CSV file location is **RelDirName**.

    \n.. comments:
    Input:
        DataType:        string containing the data type to read; only 'Storages' is implemented
        NumDataSets:     number of data sets to be read in (default = 100000)
        requeYear:       string containing year [####] for which data is to be retrieved (default = '')
        RelDirName:      string containing dir name where the GSE meta data can be found
                         (default = 'Eingabe/GSE/')
    Return:
        ReturnComponent: list of elements of a single component.
    """
    ReturnComponent = []
    if 'Storages' in DataType:
        # Initialization
        count = 0
        FileName = str(RelDirName / 'GSE_Storage.csv')
        # Reading meta data from the CSV file
        FileEncoding = "ISO-8859-15"
        fid = open(FileName, "r", encoding=FileEncoding, errors='ignore')
        # Skipping the header lines
        for ii in range(23):
            fid.readline()
        # Reading next line
        temp = M_Helfer.strip_accents(fid.readline()[:-1])
        while (len(temp) > 0) and (count < NumDataSets):
            Save = False
            country_code = temp.split(';')[2]
            operator_name = temp.split(';')[5]
            name = temp.split(';')[6]
            id = temp.split(';')[6]
            id = id.replace("'", ' ')
            node_id = [id]
            source_id = [ID_Add + str(id)]
            status = temp.split(';')[7]
            # start_year
            start_year = temp.split(';')[9]
            if len(start_year) == 0:
                start_year = None
                Save = True
            else:
                start_year = int(start_year)
                if requeYear == '':
                    Save = True
                elif start_year <= int(requeYear):
                    Save = True
            # max_workingGas_TWh
            max_workingGas_TWh = temp.split(';')[14]
            if len(max_workingGas_TWh) == 0:
                max_workingGas_TWh = None
            else:
                max_workingGas_TWh = float(max_workingGas_TWh)
            # max_cap_store2pipe_GWh_per_d
            max_cap_store2pipe_GWh_per_d = temp.split(';')[16]
            if len(max_cap_store2pipe_GWh_per_d) == 0:
                max_cap_store2pipe_GWh_per_d = None
            else:
                max_cap_store2pipe_GWh_per_d = float(max_cap_store2pipe_GWh_per_d)
            # max_cap_pipe2store_GWh_per_d
            max_cap_pipe2store_GWh_per_d = temp.split(';')[20]
            if len(max_cap_pipe2store_GWh_per_d) == 0:
                max_cap_pipe2store_GWh_per_d = None
            else:
                max_cap_pipe2store_GWh_per_d = float(max_cap_pipe2store_GWh_per_d)
            # is_inEU
            is_inEU = temp.split(';')[25]
            is_inEU = (is_inEU.lower() == 'yes')
            inEUMember = temp.split(';')[23]
            if 'y' == inEUMember and is_inEU == True and Save:
                # Deriving a short name by stripping operator/zone prefixes
                name_short = name
                name_short = name_short.replace('SERENE Nord: ', '')
                name_short = name_short.replace('VGS SEDIANE B: ', '')
                name_short = name_short.replace('SERENE SUD', '')
                name_short = name_short.replace('SEDIANE LITTORAL:', '')
                name_short = name_short.replace('(XIV-XV)', '')
                name_short = name_short.replace('(Atwick)', '')
                name_short = name_short.replace('SEDIANE: ', '')
                name_short = name_short.replace('GSF ', '')
                name_short = name_short.replace('VGS ', '')
                name_short = name_short.replace('Eneco', '')
                name_short = name_short.replace('Uniper', '')
                name_short = name_short.replace('HGas', 'H')
                name_short = name_short.replace('LGas', 'L')
                name_short = name_short.replace('H-Gas', 'H')
                name_short = name_short.replace('L-Gas', 'L')
                name_short = name_short.replace('complex', '')
                name_short = name_short.replace('Trianel', '')
                name_short = name_short.replace('Offshore', '')
                name_short = name_short.replace('/', '')
                name_short = name_short.replace('-', '')
                name_short = name_short.replace('?', '')
                name_short = name_short.replace(':', '')
                name_short = name_short.replace('\'', '')
                name_short = name_short.replace('t', 'T')
                name_short = name_short.replace(' ', '')
                #'operator_name': operator_name,
                #'status': status,
                #'start_year': start_year,
                ReturnComponent.append(K_Component.Storages(
                    name=name,
                    id=id,
                    node_id=node_id,
                    country_code=country_code,
                    lat=None,
                    long=None,
                    source_id=source_id,
                    param={'name_short': name_short,
                           'max_cap_store2pipe_GWh_per_d': max_cap_store2pipe_GWh_per_d,
                           'max_cap_pipe2store_GWh_per_d': max_cap_pipe2store_GWh_per_d,
                           'max_workingGas_TWh': max_workingGas_TWh}))
                count = count + 1
            # Reading next line
            temp = M_Helfer.strip_accents(fid.readline()[:-1])
    return ReturnComponent
def read_component(DataType='', NumDataSets=1e+100, RelDirName=None):
    """ Method of reading in LKD components from shape files. **RelDirName**
    supplies the relative location of the shape files, whereas **DataType**
    specifies which component is to be read in, with options 'PipeSegments',
    'Nodes', 'Storages', and 'Productions'.

    \n.. comments:
    Input:
        DataType:        string specifying the component to read
        NumDataSets:     max number of data sets to be read in (default = 1e+100)
        RelDirName:      string containing the relative path name of where data will be loaded from
                         (default = 'Eingabe/LKD/')
    Return:
        ReturnComponent: list of elements of the selected component.
    """
    ReturnComponent = []
    inCoord = 'epsg:31468'
    outCoord = 'epsg:4326'
    count = 0

    if DataType == 'PipeSegments':
        FileName_Shape = str(RelDirName / 'pipelines_utf8.shp')
        # Loading from shape file
        Shapes = shapefile.Reader(FileName_Shape, encoding="utf8")
        for shape in Shapes.shapeRecords():
            # Getting PolyLine parts
            parts = sorted(shape.shape.parts)
            # Joining X and Y coordinates from shape.shape.points
            vec = shape.shape.points
            polyLine = K_Component.PolyLine(lat=[], long=[])
            for x, y in vec:
                polyLine.long.append(x)
                polyLine.lat.append(y)
            # Converting to lat/long
            polyLine = M_Projection.XY2LatLong(polyLine, inCoord, outCoord)
            # Generation of PipeLine
            PipeLine = M_Shape.PolyLine2PipeLines(polyLine, parts, source=C_Code, country_code=C_Code)
            lat = PipeLine[0].lat
            long = PipeLine[0].long
            # Getting meta data
            id = str(shape.record[0])
            source_id = [ID_Add + str(id)]
            name = replaceString(shape.record[1])
            if len(name) == 0:
                name = 'PS_' + str(id)
            # Converting gas_type to boolean
            is_H_gas = shape.record[2]
            if is_H_gas == 'L':
                is_H_gas = 0
            elif is_H_gas == 'H':
                is_H_gas = 1
            length = float(shape.record[3]) / 1000
            pipe_class_type = shape.record[6]
            if pipe_class_type == '':
                pipe_class_type = None
            # is_virtualPipe
            is_virtualPipe = False
            if len(shape.record[4]) > 0:
                if shape.record[4] == 1:
                    is_virtualPipe = True
            # diameter_mm
            if len(shape.record[5]) > 0:
                if 'NULL' == shape.record[5]:
                    diameter_mm = float('nan')
                else:
                    diameter_mm = float(shape.record[5])
            else:
                diameter_mm = float('nan')
            # max_pressure_bar (values above 200 bar are treated as invalid)
            if shape.record[7] is None:
                max_pressure_bar = float('nan')
            elif type(shape.record[7]) == int:
                max_pressure_bar = float(shape.record[7])
                if max_pressure_bar > 200:
                    max_pressure_bar = float('nan')
            elif len(shape.record[7]) > 0:
                if 'NULL' == shape.record[7]:
                    max_pressure_bar = float('nan')
                else:
                    max_pressure_bar = float(shape.record[7])
                    if max_pressure_bar > 200:
                        max_pressure_bar = float('nan')
            else:
                max_pressure_bar = float('nan')
            # Method/uncertainty flags for possibly estimated attributes
            diam_est = shape.record[8]
            class_est = shape.record[9]
            press_est = shape.record[10]
            if isinstance(diam_est, str):
                if diam_est == 'NULL':
                    diam_est = float('nan')
                diam_est_method = 'raw'
                diam_est_uncertainty = 0
            else:
                if diam_est == 1:
                    diam_est_method = 'estimated'
                    diam_est_uncertainty = 1
                else:
                    diam_est_method = 'raw'
                    diam_est_uncertainty = 0
            if isinstance(class_est, str):
                if class_est == 'NULL':
                    class_est = float('nan')
                class_est_method = 'raw'
                class_est_uncertainty = 0
            else:
                if class_est == 1:
                    class_est_method = 'estimated'
                    class_est_uncertainty = 1
                else:
                    class_est_method = 'raw'
                    class_est_uncertainty = 0
            if isinstance(press_est, str):
                if press_est == 'NULL':
                    press_est = float('nan')
                press_est_method = 'raw'
                press_est_uncertainty = 0
            else:
                if press_est == 1:
                    press_est_method = 'estimated'
                    press_est_uncertainty = 1
                else:
                    press_est_method = 'raw'
                    press_est_uncertainty = 0
            max_cap_GWh_per_d = shape.record[11]
            operator_name = str(shape.record[12])
            node_id = ['N_' + str(shape.record[13]), 'N_' + str(shape.record[14])]
            # Re-mapping a few known duplicate node ids
            if 'N_809066' in node_id and 'N_809063' in node_id:
                if node_id[0] == 'N_809066':
                    node_id[0] = 'N_809076'
                else:
                    node_id[1] = 'N_809076'
            if 'N_809066' in node_id and 'N_1000001' in node_id:
                if node_id[0] == 'N_809066':
                    node_id[0] = 'N_809076'
                else:
                    node_id[1] = 'N_809076'
            if 'N_809065' in node_id and 'N_809025' in node_id:
                if node_id[0] == 'N_809065':
                    node_id[0] = 'N_809075'
                else:
                    node_id[1] = 'N_809075'
            if 'N_809065' in node_id and 'N_1000001' in node_id:
                if node_id[0] == 'N_809065':
                    node_id[0] = 'N_809075'
                else:
                    node_id[1] = 'N_809075'
            if 'N_809064' in node_id and 'N_809026' in node_id:
                if node_id[0] == 'N_809064':
                    node_id[0] = 'N_809074'
                else:
                    node_id[1] = 'N_809074'
            if 'N_809064' in node_id and 'N_1000001' in node_id:
                if node_id[0] == 'N_809064':
                    node_id[0] = 'N_809074'
                else:
                    node_id[1] = 'N_809074'
            country_code = ['DE', 'DE']
            if is_virtualPipe == False:
                ReturnComponent.append(K_Component.PipeSegments(
                    id=id,
                    name=name,
                    lat=lat,
                    long=long,
                    country_code=country_code,
                    node_id=node_id,
                    source_id=source_id,
                    param={'max_pressure_bar': max_pressure_bar,
                           'is_H_gas': is_H_gas,
                           'length': length,
                           'diameter_mm': diameter_mm,
                           'pipe_class_type': pipe_class_type,
                           'max_cap_GWh_per_d': max_cap_GWh_per_d,
                           'operator_name': operator_name},
                    method={'diameter_mm': diam_est_method,
                            'pipe_class_type': class_est_method,
                            'max_pressure_bar': press_est_method},
                    uncertainty={'diameter_mm': diam_est_uncertainty,
                                 'pipe_class_type': class_est_uncertainty,
                                 'max_pressure_bar': press_est_uncertainty}))
                count = count + 1
            if count > NumDataSets:
                return ReturnComponent

    elif DataType == 'Nodes':
        FileName_Shape = str(RelDirName / 'nodes_utf8.shp')
        # Loading from shape file
        Shapes = shapefile.Reader(FileName_Shape, encoding="utf8")
        for shape in Shapes.shapeRecords():
            id = 'N_' + shape.record[0]
            source_id = [ID_Add + str(shape.record[0])]
            name = replaceString(shape.record[1])
            operator_name = str(shape.record[2])
            is_import = shape.record[3]
            is_export = shape.record[4]
            H_L_conver = int(shape.record[5])
            operator_Z = shape.record[6]
            compressor = shape.record[7]
            compUnit = shape.record[8]
            if 'NULL' in compUnit:
                compUnit = 0
            elif len(compUnit) == 0:
                compUnit = 0
            else:
                compUnit = float(compUnit)
            country_code = shape.record[12]
            X_coor = shape.record[13]
            Y_coor = shape.record[14]
            entsog_nam = str(shape.record[15])
            if len(entsog_nam) > 0:
                name = entsog_nam
            if name == '':
                name = 'Ort_' + str(id)
            entsog_key = shape.record[16]
            if entsog_key == '':
                entsog_key = None
            is_crossBorder = shape.record[17]
            ugs = shape.record[19]
            production = shape.record[20]
            exact = 1
            license = 'open data'
            # Converting coordinates to lat/long
            Line = K_Component.PolyLine(lat=Y_coor, long=X_coor)
            Line = M_Projection.XY2LatLong(Line, inCoord, outCoord)
            lat = Line.lat
            long = Line.long
            # Re-mapping a few known duplicate node ids
            if id == 'N_809066' and country_code == 'AT':
                id = 'N_809076'
            elif id == 'N_809065' and country_code == 'AT':
                id = 'N_809075'
            elif id == 'N_809064' and country_code == 'AT':
                id = 'N_809074'
            ReturnComponent.append(K_Component.Nodes(
                id=id,
                node_id=[id],
                name=name,
                source_id=source_id,
                long=long,
                lat=lat,
                country_code=country_code,
                param={'exact': exact,
                       'H_L_conver': H_L_conver,
                       'operator_Z': operator_Z,
                       'compressor': compressor,
                       'comp_units': compUnit,
                       'entsog_key': entsog_key,
                       'is_crossBorder': is_crossBorder,
                       'ugs': ugs,
                       'production': production,
                       'operator_name': operator_name,
                       'is_import': is_import,
                       'is_export': is_export,
                       'license': license}))
            count = count + 1
            if count > NumDataSets:
                return ReturnComponent

    elif DataType == 'Storages':
        FileName_Shape = str(RelDirName / 'storages_utf8.shp')
        # Loading from shape file
        Shapes = shapefile.Reader(FileName_Shape, encoding="utf8")
        for shape in Shapes.shapeRecords():
            id = 'N_' + shape.record[0]
            source_id = [ID_Add + str(shape.record[0])]
            name = replaceString(shape.record[1])
            operator_name = str(shape.record[2])
            entsog_nam = str(shape.record[9])
            if len(entsog_nam) > 0:
                name = entsog_nam
            entsog_key = shape.record[10]
            if entsog_key == '':
                entsog_key = None
            max_cap_pipe2store_GWh_per_d = shape.record[11]
            max_cap_store2pipe_GWh_per_d = shape.record[12]
            node_id = ['N_' + shape.record[0]]
            country_code = shape.record[6]
            ReturnComponent.append(K_Component.Storages(
                id=id,
                name=name,
                source_id=source_id,
                country_code=country_code,
                node_id=node_id,
                param={'operator_name': operator_name,
                       'entsog_key': entsog_key,
                       'max_cap_pipe2store_GWh_per_d': max_cap_pipe2store_GWh_per_d,
                       'max_cap_store2pipe_GWh_per_d': max_cap_store2pipe_GWh_per_d}))
            count = count + 1
            if count > NumDataSets:
                return ReturnComponent

    elif DataType == 'Productions':
        FileName_Shape = str(RelDirName / 'productions_utf8.shp')
        # Loading from shape file
        Shapes = shapefile.Reader(FileName_Shape, encoding="utf8")
        for shape in Shapes.shapeRecords():
            id = 'N_' + shape.record[0]
            source_id = [ID_Add + str(shape.record[0])]
            name = replaceString(shape.record[1])
            operator_name = str(shape.record[2])
            entsog_nam = str(shape.record[9])
            if len(entsog_nam) > 0:
                name = entsog_nam
            entsog_key = shape.record[10]
            if entsog_key == '':
                entsog_key = None
            max_production = shape.record[11]
            node_id = ['N_' + shape.record[0]]
            country_code = shape.record[6]
            ReturnComponent.append(K_Component.Productions(
                id=id,
                name=name,
                source_id=source_id,
                node_id=node_id,
                country_code=country_code,
                param={'entsog_key': entsog_key,
                       'operator_name': operator_name,
                       'is_H_gas': 1,
                       'max_production_GWh_per_d': max_production}))
            count = count + 1
            if count > NumDataSets:
                return ReturnComponent

    return ReturnComponent
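# M_Projection.XY2LatLong above is assumed to reproject Gauss-Krueger zone 4
# coordinates (epsg:31468) to WGS84 (epsg:4326). A minimal stand-in built on
# pyproj, which this module does not necessarily use itself; the sample
# coordinates are illustrative only:
from pyproj import Transformer

_gk4_to_wgs84 = Transformer.from_crs('epsg:31468', 'epsg:4326', always_xy=True)
# always_xy=True: input is (easting, northing), output is (longitude, latitude)
# long_deg, lat_deg = _gk4_to_wgs84.transform(4591000.0, 5669000.0)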
def read_component(DataType='', NumDataSets=100000, RelDirName=None, sourceName=None, Nodes=[]):
    """ Reading in GasLib data sets from XML file, with **NumDataSets**
    maximum number of records to read. The relative path of the XML file
    location is **RelDirName**.

    \n.. comments:
    Input:
        DataType:        string containing the data type to read, e.g. 'Nodes'.
        NumDataSets:     number of data sets to be read in (default = 100000).
        RelDirName:      string containing the directory name where the GasLib data can be found
                         (default = None).
        sourceName:      string containing an abbreviation for the source of the data (default = None).
        Nodes:           list of nodes. Obsolete.
    Return:
        ReturnComponent: list of elements of the selected component.
    """
    ReturnComponent = []
    if 'GasLib-135' in sourceName:
        ID_Add = 'GL135_'
    elif 'GasLib-4197' in sourceName:
        ID_Add = 'GL4197_'
    elif 'GasLib-582-v2' in sourceName:
        ID_Add = 'GL582_'
    elif 'GasLib-134-v2' in sourceName:
        ID_Add = 'GL134_'
    else:
        print('ERROR: M_GasLib.read_component: sourceName not known. Program terminates.')
        return []
    # Namespace prefix of the GasLib XML tags
    schrott = '{http://gaslib.zib.de/Gas}'
    XML_fileName = Path(os.path.join(RelDirName, sourceName))

    if 'Nodes' in DataType:
        # Accessing the XML file
        tree = ET.parse(XML_fileName)
        root = tree.getroot()
        # Going through each entry
        for child in root[1]:
            id = child.attrib['id']
            node_id = [id]
            if 'alias' in child.attrib.keys():
                name = child.attrib['alias']
            else:
                name = []
            if len(name) == 0:
                name = id
            lat = float(child.attrib['geoWGS84Lat'])
            long = float(child.attrib['geoWGS84Long'])
            source_id = [ID_Add + id]
            country_code = 'DE'
            elevation_m = None
            min_pressure_bar = None
            max_pressure_bar = None
            for kind in child:
                tag = kind.tag.replace(schrott, '')
                if 'height' == tag:
                    elevation_m = float(kind.attrib['value'])
                elif 'pressureMin' == tag:
                    min_pressure_bar = float(kind.attrib['value'])
                elif 'pressureMax' == tag:
                    max_pressure_bar = float(kind.attrib['value'])
            ReturnComponent.append(K_Component.Nodes(
                id=id, node_id=node_id, name=name, lat=lat, long=long,
                source_id=source_id, country_code=country_code,
                param={'elevation_m': elevation_m,
                       'min_pressure_bar': min_pressure_bar,
                       'max_pressure_bar': max_pressure_bar}))

    elif 'EntryPoints' in DataType:
        # Accessing the XML file
        tree = ET.parse(XML_fileName)
        root = tree.getroot()
        # Going through each entry
        for child in root[1]:
            id = child.attrib['id']
            if 'source' in id:
                node_id = [id]
                name = child.attrib['alias']
                if len(name) == 0:
                    name = id
                lat = float(child.attrib['geoWGS84Lat'])
                long = float(child.attrib['geoWGS84Long'])
                source_id = [ID_Add + id]
                country_code = 'DE'
                elevation_m = None
                min_pressure_bar = None
                max_pressure_bar = None
                min_cap_M_m3_per_d = None
                max_cap_M_m3_per_d = None
                gasTemperature_C = None
                calorificValue_MJ_per_m3 = None
                normDensity_kg_per_m3 = None
                coefficient_A_heatCapacity = None
                coefficient_B_heatCapacity = None
                coefficient_C_heatCapacity = None
                molarMass_kg_per_kmol = None
                pseudocriticalPressure = None
                pseudocriticalTemperature = None
                for kind in child:
                    tag = kind.tag.replace(schrott, '')
                    if 'height' == tag:
                        elevation_m = float(kind.attrib['value'])
                    elif 'pressureMin' == tag:
                        min_pressure_bar = float(kind.attrib['value'])
                    elif 'pressureMax' == tag:
                        max_pressure_bar = float(kind.attrib['value'])
                    elif 'flowMin' == tag:
                        min_cap_M_m3_per_d = float(kind.attrib['value']) / 1000 * 24
                    elif 'flowMax' == tag:
                        max_cap_M_m3_per_d = float(kind.attrib['value']) / 1000 * 24
                    elif 'gasTemperature' == tag:
                        gasTemperature_C = float(kind.attrib['value'])
                    elif 'calorificValue' == tag:
                        calorificValue_MJ_per_m3 = float(kind.attrib['value'])
                    elif 'normDensity' == tag:
                        normDensity_kg_per_m3 = float(kind.attrib['value'])
                    elif 'coefficient_A_heatCapacity' == tag:
                        coefficient_A_heatCapacity = float(kind.attrib['value'])
                    elif 'coefficient_B_heatCapacity' == tag:
                        coefficient_B_heatCapacity = float(kind.attrib['value'])
                    elif 'coefficient_C_heatCapacity' == tag:
                        coefficient_C_heatCapacity = float(kind.attrib['value'])
                    elif 'molarMass' == tag:
                        molarMass_kg_per_kmol = float(kind.attrib['value'])
                    elif 'pseudocriticalPressure' == tag:
                        pseudocriticalPressure = float(kind.attrib['value'])
                    elif 'pseudocriticalTemperature' == tag:
                        pseudocriticalTemperature = float(kind.attrib['value'])
                ReturnComponent.append(K_Component.EntryPoints(
                    id=id, node_id=node_id, name=name, lat=lat, long=long,
                    source_id=source_id, country_code=country_code,
                    param={'elevation_m': elevation_m,
                           'min_pressure_bar': min_pressure_bar,
                           'max_pressure_bar': max_pressure_bar,
                           'min_cap_M_m3_per_d': min_cap_M_m3_per_d,
                           'max_cap_M_m3_per_d': max_cap_M_m3_per_d,
                           'gasTemperature_C': gasTemperature_C,
                           'calorificValue_MJ_per_m3': calorificValue_MJ_per_m3,
                           'normDensity_kg_per_m3': normDensity_kg_per_m3,
                           'coefficient_A_heatCapacity': coefficient_A_heatCapacity,
                           'coefficient_B_heatCapacity': coefficient_B_heatCapacity,
                           'coefficient_C_heatCapacity': coefficient_C_heatCapacity,
                           'molarMass_kg_per_kmol': molarMass_kg_per_kmol,
                           'pseudocriticalPressure': pseudocriticalPressure,
                           'pseudocriticalTemperature': pseudocriticalTemperature}))

    elif 'PipeSegments' in DataType:
        # Initialization
        tree = ET.parse(XML_fileName)
        root = tree.getroot()
        # Dissecting entries from the XML file
        for child in root[2]:
            id = child.attrib['id']
            node_id = [child.attrib['from'], child.attrib['to']]
            name = child.attrib['alias']
            if len(name) == 0:
                name = id
            source_id = [ID_Add + id]
            country_code = 'DE'
            max_pressure_bar = None
            min_cap_M_m3_per_d = None
            max_cap_M_m3_per_d = None
            length = None
            diameter_mm = None
            roughness_mm = None
            heatTransferCoefficient_W_per_m2_per_K = None
            for kind in child:
                tag = kind.tag.replace(schrott, '')
                if 'flowMin' == tag:
                    min_cap_M_m3_per_d = float(kind.attrib['value']) / 1000 * 24
                elif 'flowMax' == tag:
                    max_cap_M_m3_per_d = float(kind.attrib['value']) / 1000 * 24
                elif 'length' == tag:
                    length = float(kind.attrib['value'])
                elif 'diameter' == tag:
                    diameter_mm = float(kind.attrib['value'])
                elif 'roughness' == tag:
                    roughness_mm = float(kind.attrib['value'])
                elif 'pressure' == tag:
                    max_pressure_bar = float(kind.attrib['value'])
                elif 'heatTransferCoefficient' == tag:
                    heatTransferCoefficient_W_per_m2_per_K = float(kind.attrib['value'])
            ReturnComponent.append(K_Component.PipeSegments(
                id=id, name=name, source_id=source_id, node_id=node_id,
                country_code=country_code,
                param={'max_pressure_bar': max_pressure_bar,
                       'min_cap_M_m3_per_d': min_cap_M_m3_per_d,
                       'max_cap_M_m3_per_d': max_cap_M_m3_per_d,
                       'length': length,
                       'diameter_mm': diameter_mm,
                       'roughness_mm': roughness_mm,
                       'heatTransferCoefficient_W_per_m2_per_K': heatTransferCoefficient_W_per_m2_per_K}))

    elif 'Compressors' in DataType:
        # Initialization
        tree = ET.parse(XML_fileName)
        root = tree.getroot()
        # Going through each entry
        for child in root[2]:
            id = child.attrib['id']
            if 'compressorStation' in id:
                node_id = [child.attrib['from']]
                source_id = [ID_Add + id]
                country_code = 'DE'
                name = child.attrib['alias']
                if len(name) == 0:
                    name = id
                from_node = child.attrib['from']
                to_node = child.attrib['to']
                energy_node = child.attrib['fuelGasVertex']
                loss_pressure_pipe2comp_bar = None
                loss_pressure_comp2pipe_bar = None
                min_pressure_pipe2comp_bar = None
                max_pressure_comp2pipe_bar = None
                diameter_pipe2comp_mm = None
                diameter_comp2pipe_mm = None
                dragFactor_pipe2comp = None
                dragFactor_comp2pipe = None
                min_cap_M_m3_per_d = None
                max_cap_M_m3_per_d = None
                # has_gasCooler
                has_gasCooler = float(child.attrib['gasCoolerExisting'])
                # internalBypassRequired
                internalBypassRequired = float(child.attrib['internalBypassRequired'])
                for kind in child:
                    tag = kind.tag.replace(schrott, '')
                    if 'flowMin' == tag:
                        min_cap_M_m3_per_d = float(kind.attrib['value']) / 1000 * 24
                    elif 'flowMax' == tag:
                        max_cap_M_m3_per_d = float(kind.attrib['value']) / 1000 * 24
                    elif 'pressureLossIn' == tag:
                        loss_pressure_pipe2comp_bar = float(kind.attrib['value'])
                    elif 'pressureLossOut' == tag:
                        loss_pressure_comp2pipe_bar = float(kind.attrib['value'])
                    elif 'pressureInMin' == tag:
                        min_pressure_pipe2comp_bar = float(kind.attrib['value'])
                    elif 'pressureOutMax' == tag:
                        max_pressure_comp2pipe_bar = float(kind.attrib['value'])
                    elif 'diameterIn' == tag:
                        diameter_pipe2comp_mm = float(kind.attrib['value'])
                    elif 'diameterOut' == tag:
                        diameter_comp2pipe_mm = float(kind.attrib['value'])
                    elif 'dragFactorIn' == tag:
                        dragFactor_pipe2comp = float(kind.attrib['value'])
                    elif 'dragFactorOut' == tag:
                        dragFactor_comp2pipe = float(kind.attrib['value'])
                ReturnComponent.append(K_Component.Compressors(
                    id=id, name=name, source_id=source_id, node_id=node_id,
                    country_code=country_code,
                    param={'from_node': from_node,
                           'to_node': to_node,
                           'has_gasCooler': has_gasCooler,
                           'energy_node': energy_node,
                           'loss_pressure_pipe2comp_bar': loss_pressure_pipe2comp_bar,
                           'loss_pressure_comp2pipe_bar': loss_pressure_comp2pipe_bar,
                           'min_pressure_pipe2comp_bar': min_pressure_pipe2comp_bar,
                           'max_pressure_comp2pipe_bar': max_pressure_comp2pipe_bar,
                           'diameter_pipe2comp_mm': diameter_pipe2comp_mm,
                           'diameter_comp2pipe_mm': diameter_comp2pipe_mm,
                           'dragFactor_pipe2comp': dragFactor_pipe2comp,
                           'dragFactor_comp2pipe': dragFactor_comp2pipe,
                           'internalBypassRequired': internalBypassRequired}))
    return ReturnComponent
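# The recurring float(kind.attrib['value']) / 1000 * 24 above converts GasLib
# flow bounds from thousand cubic metres per hour into million cubic metres
# per day:
#   value [10^3 m^3/h] * 10^3 [m^3 per 10^3 m^3] * 24 [h/d] / 10^6 [m^3 per M m^3]
#   = value * 0.024 = value / 1000 * 24  [M m^3/d]
# e.g. flowMax = 41666.7 (10^3 m^3/h) -> 41666.7 / 1000 * 24 ~ 1000 M m^3/d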
def read_component(DataType='LNGs', NumDataSets=100000, requeYear=[2000], DirName=None):
    """ Reading in GIE LNG and storage data sets from the GIE API, with
    **NumDataSets** maximum number of records to read, and **requeYear** for
    which years to get data. **DirName** is the relative path name of the GIE
    meta data.

    \n.. comments:
    Input:
        DataType:        string containing the data type to read; options are 'LNGs' or 'Storages'
        NumDataSets:     (Optional = 100000) number of data sets
        requeYear:       (Optional = [2000]) list of numbers containing years [####] for which data
                         is to be retrieved
        DirName:         string containing the relative dir name where the GIE meta data can be found
                         (default = 'Eingabe/GIE/')
    Return:
        ReturnComponent: instance of a component (list of single type elements)
    """
    # Dealing with the private key
    ReturnComponent = []
    pathPrivKey = os.path.join(os.getcwd(), 'Eingabe/GIE/GIE_PrivateKey.txt')
    if os.path.isfile(pathPrivKey) is False:
        print('ERROR: M_GIE.read_component: you will need to get a private key from the GIE API.')
        print('Please see documentation for help.')
        print('No data will be loaded')
        return ReturnComponent
    PrivKey = M_Helfer.getLineFromFile(pathPrivKey)

    if 'LNGs' in DataType:
        # Initialization
        webCall_1 = 'https://alsi.gie.eu/api/data/'
        eic_code = ''
        count = 0
        filename = str(DirName / 'GIE_LNG.csv')
        print(' LNGs progress:')
        # Reading meta data from the CSV file
        fid = open(filename, "r", encoding='iso-8859-15', errors='ignore')
        # Reading header line
        fid.readline()
        # Reading next line
        temp = M_Helfer.strip_accents(fid.readline()[:-1])
        while (len(temp) > 0) and (count < NumDataSets):
            typeval = temp.split(';')[1]
            if 'LSO' not in typeval:
                country_code = temp.split(';')[0]
                id = temp.split(';')[2]
                node_id = [id]
                source_id = [ID_Add + str(id)]
                facility_code = temp.split(';')[2]
                name = temp.split(';')[4]
                name_short = temp.split(';')[5]
                name_short = replaceString(name_short)
                ReturnComponent.append(K_Component.LNGs(
                    name=name, id=id, node_id=node_id, source_id=source_id,
                    country_code=country_code, lat=None, long=None,
                    param={'facility_code': facility_code,
                           'name_short': name_short,
                           'eic_code': eic_code}))
                count = count + 1
            else:
                eic_code = temp.split(';')[2]
            # Reading next line
            temp = M_Helfer.strip_accents(fid.readline()[:-1])

        # Creation of a pool manager
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
        # Reading the time series for each created LNG terminal off the API
        maxSets = len(ReturnComponent)
        count = 0
        for ii in range(maxSets):
            # Initialization
            workingLNGVolume = []
            Store2PipeCap = []
            # Information from the CSV file
            this_facility_code = ReturnComponent[ii].param['facility_code']
            this_country_code = ReturnComponent[ii].country_code
            this_eic_code = ReturnComponent[ii].param['eic_code']
            thisURL = webCall_1 + this_facility_code + '/' + this_country_code + '/' + this_eic_code
            # Get the data
            URLData = http.request('GET', thisURL, headers={'x-key': PrivKey})
            # Convert the data into dict
            tables = []
            try:
                tables = json.loads(URLData.data.decode('UTF-8'))
            except:
                print('ERROR: M_GIE.read_component(LNGs): reading URL failed')
                return []
            # Checking that the results coming back are ok
            if 'error' in tables:
                print('GIE load_Storages: something went wrong while getting LNG data from GIE')
                print(tables)
            # Data allowed to be parsed
            else:
                for tt in tables:
                    # Dissecting the input
                    for year in requeYear:
                        if (tt['dtmi'] != '-') and (str(year) in tt['gasDayStartedOn']):
                            # dtmi: declared total maximum inventory [1000 m^3 LNG]
                            workingLNGVolume.append(float(tt['dtmi']) * 1000)
                            # dtrs: declared total reference send-out [GWh/d]
                            Store2PipeCap.append(float(tt['dtrs']))
                # Remove wrong data points
                workingLNGVolume = M_Helfer.testData(workingLNGVolume, 'PercentAbsDiff', 4, 0)
                Store2PipeCap = M_Helfer.testData(Store2PipeCap, 'PercentAbsDiff', 4, 0)
                # Update screen with dot
                print('.', end='')
                # Deriving required values from the time series
                ReturnComponent[ii].param.update({
                    'max_workingLNG_M_m3': M_MatLab.get_median(workingLNGVolume)[0] / 1000000})
                ReturnComponent[ii].param.update({
                    'median_cap_store2pipe_GWh_per_d': M_MatLab.get_median(Store2PipeCap)[0]})
                ReturnComponent[ii].param.update({
                    'max_cap_store2pipe_GWh_per_d': M_MatLab.get_max(Store2PipeCap)[0]})
                count = count + 1
                if count > NumDataSets:
                    print(' ')
                    return ReturnComponent

    elif 'Storages' in DataType:
        # Initialization
        webCall_1 = 'https://agsi.gie.eu/api/data/'
        eic_code = ''
        count = 0
        print(' STORAGES progress:')
        filename = str(DirName / 'GIE_Storages.csv')
        # Reading meta data from the CSV file
        fid = open(filename, "r", encoding="iso-8859-15", errors="surrogateescape")
        # Reading header line
        fid.readline()
        # Reading next line
        temp = M_Helfer.strip_accents(fid.readline()[:-1])
        while (len(temp) > 0) and (count < NumDataSets):
            typeval = temp.split(';')[1]
            if 'Storage Facility' in typeval:
                country_code = temp.split(';')[0]
                id = temp.split(';')[2]
                node_id = [id]
                source_id = [ID_Add + str(id)]
                facility_code = temp.split(';')[2]
                name = temp.split(';')[4]
                name_short = temp.split(';')[5]
                name_short = replaceString(name_short)
                name_short = name_short.replace(' ', '')
                name_short = name_short.strip()
                if 'OudeStatenzijl' in name_short:
                    country_code = 'NL'
                elif 'KinsaleSouthwest' in name_short:
                    country_code = 'IRL'
                ReturnComponent.append(K_Component.Storages(
                    name=name, id=id, node_id=node_id, lat=None, long=None,
                    source_id=source_id, country_code=country_code,
                    param={'facility_code': facility_code,
                           'eic_code': eic_code,
                           'name_short': name_short}))
                count = count + 1
            else:
                eic_code = temp.split(';')[2]
            # Reading next line
            temp = M_Helfer.strip_accents(fid.readline()[:-1])

        # Creation of a pool manager
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
        # Reading the time series for each created storage off the API
        maxSets = min([len(ReturnComponent), NumDataSets])
        count = 0
        keepPos = []
        for ii in range(maxSets):
            # Initialization
            max_workingGas_M_m3 = []
            Store2PipeCap = []
            Pipe2StoreCap1 = []
            # Information from the CSV file
            this_facility_code = ReturnComponent[ii].param['facility_code']
            this_country_code = ReturnComponent[ii].country_code
            this_eic_code = ReturnComponent[ii].param['eic_code']
            thisURL = webCall_1 + this_facility_code + '/' + this_country_code + '/' + this_eic_code
            # Get the data
            URLData = http.request('GET', thisURL, headers={'x-key': PrivKey})
            # Convert the data into dict
            tables = []
            try:
                tables = json.loads(URLData.data.decode('UTF-8'))
                # Checking that the results coming back are ok
                if 'error' in tables:
                    print('GIE load_Storages: something went wrong while getting Storage data from GIE')
                # Data allowed to be parsed
                else:
                    for tt in tables:
                        # Dissecting the input
                        for year in requeYear:
                            if (tt['gasInStorage'] != '-') and (str(year) in tt['gasDayStartedOn']):
                                max_workingGas_M_m3.append(float(tt['workingGasVolume']))
                                Store2PipeCap.append(float(tt['injectionCapacity']))
                                Pipe2StoreCap1.append(float(tt['withdrawalCapacity']))
                    # Remove wrong data sets
                    max_workingGas_M_m3 = M_Helfer.testData(max_workingGas_M_m3, 'PercentAbsDiff', 4, 0)
                    Store2PipeCap = M_Helfer.testData(Store2PipeCap, 'PercentAbsDiff', 4, 0)
                    Pipe2StoreCap = M_Helfer.testData(Pipe2StoreCap1, 'PercentAbsDiff', 4, 0)
                    # Deriving required values from the time series
                    ReturnComponent[ii].param.update({
                        'max_workingGas_M_m3': M_MatLab.get_max(max_workingGas_M_m3)[0]})
                    ReturnComponent[ii].param.update({
                        'max_cap_store2pipe_GWh_per_d': M_MatLab.get_max(Store2PipeCap)[0]})
                    ReturnComponent[ii].param.update({
                        'max_cap_pipe2store_GWh_per_d': M_MatLab.get_max(Pipe2StoreCap)[0]})
                    if math.isnan(ReturnComponent[ii].param['max_cap_pipe2store_GWh_per_d']):
                        ReturnComponent[ii].param['max_cap_pipe2store_GWh_per_d'] = None
                    if math.isnan(ReturnComponent[ii].param['max_cap_store2pipe_GWh_per_d']):
                        ReturnComponent[ii].param['max_cap_store2pipe_GWh_per_d'] = None
                    if math.isnan(ReturnComponent[ii].param['max_workingGas_M_m3']):
                        ReturnComponent[ii].param['max_workingGas_M_m3'] = None
                    # Update screen with dot
                    print('.', end='')
                    keepPos.append(ii)
                    count = count + 1
                    if count > NumDataSets:
                        # Dropping elements that did not return any URL results
                        tempNetz = K_Netze.NetComp()
                        tempNetz.Storages = ReturnComponent
                        tempNetz.select_byPos('Storages', keepPos)
                        ReturnComponent = tempNetz.Storages
                        print(' ')
                        return ReturnComponent
            except:
                print('Warning: M_GIE.read_component(Storages): reading URL failed')
                print('    for ', thisURL)
        # Dropping elements that did not return any URL results
        tempNetz = K_Netze.NetComp()
        tempNetz.Storages = ReturnComponent
        tempNetz.select_byPos('Storages', keepPos)
        ReturnComponent = tempNetz.Storages
        print(' ')
    return ReturnComponent
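# A hedged sketch of the GIE API access pattern used above: an HTTPS GET with
# the private key in the 'x-key' header, JSON-decoded with json.loads. The
# URL path segments are placeholders, not a real facility.
import json
import certifi
import urllib3

http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
# resp = http.request('GET',
#                     'https://agsi.gie.eu/api/data/<facility>/<country>/<eic>',
#                     headers={'x-key': '<private key>'})
# tables = json.loads(resp.data.decode('UTF-8'))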
def getMatch_LatLong_CountryCode(comp_0, comp_1, method='inv', thresholdVal=None):
    """Gets the separation between two points and converts it into a match
    score, provided the two components share a country code (or one of them
    has none). **method** selects the measure returned, from: "inv"
    (100 / distance), "power2inv" (100 / distance^2), "loginv"
    (100 / log(distance), base e), "log10inv" (100 / log10(distance), base 10),
    "distance" (distance), "distanceThreshold" (100 if distance <=
    **thresholdVal**), and "exp" (exponential decay scaled by **thresholdVal**).
    """
    # Initialization
    RetVal = 0
    # Netz_0 is empty
    if comp_0 == '':
        RetVal = 0
    # Netz_1 is empty
    elif comp_1 == '':
        RetVal = 0
    elif comp_0.long is None:
        RetVal = 0
    elif comp_1.long is None:
        RetVal = 0
    elif type(comp_0.lat) == str:
        print('ERROR: M_Matching.getMatch_LatLong_CountryCode: input type is string. Float expected. comp_0')
        RetVal = 0
    elif type(comp_1.lat) == str:
        print('ERROR: M_Matching.getMatch_LatLong_CountryCode: input type is string. Float expected. comp_1')
        RetVal = 0
    # Both Netze contain components
    else:
        cc_Netz_0 = comp_0.country_code
        cc_Netz_1 = comp_1.country_code
        if cc_Netz_0 == cc_Netz_1 or cc_Netz_1 is None or cc_Netz_0 is None:
            # Creation of LatLong "vector" from component latlong
            latlong_Netz_0 = K_Comp.PolyLine(lat=[comp_0.lat], long=[comp_0.long])
            thisLatLong = K_Comp.PolyLine(lat=comp_1.lat, long=comp_1.long)
            [pos, minVal] = M_FindPos.find_pos_closestLatLongInList(thisLatLong, latlong_Netz_0)
            if minVal == 0.0:
                RetVal = 100
            elif method == 'inv':
                RetVal = min([100 / minVal, 100])
            elif method == 'power2inv':
                RetVal = 100 / minVal / minVal
            elif method == 'log10inv':
                RetVal = 100 / math.log(minVal, 10)
            elif method == 'loginv':
                RetVal = 100 / math.log(minVal)
            elif method == 'distance':
                RetVal = minVal
            elif method == 'distanceThreshold':
                if minVal <= thresholdVal:
                    RetVal = 100
            elif method == 'exp':
                RetVal = 100 * math.exp(-minVal * 1000 / thresholdVal)
            else:
                print('ERROR: M_Matching.getMatch_LatLong_CountryCode: method not defined.')
        else:
            return -100000
    # Testing if nan; if so, set to zero
    if math.isnan(RetVal):
        RetVal = 0
    return RetVal


#def replacePipeSegments(Netz_Main, Netz_Fine, nodeDistance=10000, lengthDistance=0.2):
#    """Placeholder: this function currently does nothing."""
#    # Determine which nodes are the same in both data sets
#    [pos_match_Netz_0, pos_add_Netz_0, pos_match_Netz_1, pos_add_Netz_1] = match(
#        Netz_Main, Netz_Fine, compName='Nodes', threshold=45, multiSelect=False,
#        numFuncs=1,
#        funcs=(lambda comp_0, comp_1: getMatch_LatLong_CountryCode(comp_0, comp_1, method='inv')))
#
#    # Convert Netz_Fine and Netz_Main into networkx graphs
#    InfoDict = {'Gewichtung': 'Gleich', 'Weight': 'Gleich'}
#    [Graph_Fine, MDGraph_Fine] = M_Graph.build_nx(InfoDict, Netz_Fine)
#    [Graph_Main, MDGraph_Main] = M_Graph.build_nx(InfoDict, Netz_Main)
#
#    for pipe1 in Netz_Main.PipeSegments:
#        # Determine length of network 1
#        pair = [pipe1.node_id[0], pipe1.node_id[1]]
#        length_Main = M_Graph.get_shortest_paths_distances(Graph_Main, pair, edge_weight_name='length')
#        # Determine length of network 2
#        length_Fine = M_Graph.get_shortest_paths_distances(Graph_Fine, pair, edge_weight_name='length')
#
#    print('M_Matching.replacePipeSegments: this function needs checking; currently it does nothing')
#    return Netz_Main
def getMatch_LatLong(comp_0, comp_1, method = 'inv'):
    """Returns a match score in the range 0..100 for two components, based on
    their separation in km. **method** selects how the distance is converted
    into a score: "inv" (100 / distance, capped at 100), "power2inv"
    (100 / distance^2), "loginv" (100 / ln(distance)), "log10inv"
    (100 / log10(distance)), and "distance" (the raw distance in km).
    """
    # Initialization
    RetVal = 0

    # Netz_0 is empty
    if comp_0 == '':
        return 0
    # Netz_1 is empty
    elif comp_1 == '':
        return 0
    elif comp_0.long is None:
        return 0
    elif comp_1.long is None:
        return 0
    elif type(comp_0.lat) == str:
        print('ERROR: M_Matching.getMatch_LatLong: input type is string. Float expected. comp_0')
        return 0
    elif type(comp_1.lat) == str:
        print('ERROR: M_Matching.getMatch_LatLong: input type is string. Float expected. comp_1')
        return 0
    # Both Netze contain components
    else:
        # Creation of LatLong "vector" from component latlong
        latlong_Netz_0 = K_Comp.PolyLine(lat = [comp_0.lat], long = [comp_0.long])  #M_Netze.get_latlongPairs_Points(comp_0)
        thisLatLong = K_Comp.PolyLine(lat = comp_1.lat, long = comp_1.long)  #M_Netze.get_latlongPairs_Points(comp_1)
        [pos, minVal] = M_FindPos.find_pos_closestLatLongInList(thisLatLong, latlong_Netz_0)
        # conversion m -> km
        minVal = minVal / 1000
        if minVal == 0.0:
            RetVal = 100
        elif method == 'inv':
            RetVal = min([100 / minVal, 100])
        elif method == 'power2inv':
            RetVal = 100 / (minVal * minVal)
        elif method == 'log10inv':
            RetVal = 100 / math.log(minVal, 10)
        elif method == 'loginv':
            RetVal = 100 / math.log(minVal)
        elif method == 'distance':
            RetVal = minVal
        else:
            print('ERROR: M_Matching.getMatch_LatLong: method not defined.')

    # Testing if nan, if so then set to zero
    if math.isnan(RetVal):
        RetVal = 0

    return RetVal
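# A minimal smoke test, assuming two Nodes roughly 10 km apart (scores are
# approximate, since distances come from M_FindPos; 0.09 deg of latitude is
# about 10 km):
#
# n0 = K_Component.Nodes(id = 'A', name = 'A', node_id = ['A'], source_id = ['A'],
#                        country_code = 'DE', lat = 50.00, long = 8.00, param = {})
# n1 = K_Component.Nodes(id = 'B', name = 'B', node_id = ['B'], source_id = ['B'],
#                        country_code = 'DE', lat = 50.09, long = 8.00, param = {})
# getMatch_LatLong(n0, n1, method = 'inv')       # ~10  (100 / ~10 km)
# getMatch_LatLong(n0, n1, method = 'distance')  # ~10  (km)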
def make_DataFrame(Netz, StatsInputDirName, This_AttribNames, thisCompName, ApplyLoad=False):
    '''Loads attribute settings from CSV, and converts the network data into
    a pandas DataFrame plus the corresponding meta data (df_actionMeta)
    for the Stats processes.
    '''
    # =======================================
    # 1. Data import
    # =======================================
    df_actionMeta = K_Component.DF_Action_Meta()

    # getting more meta data for Stats processes
    This_Convert2Float = []
    This_RegType = []
    This_Simulate = []
    This_Load = []
    DirName = os.path.join(StatsInputDirName, 'StatsAttribSettings.csv')
    StatsCompName, AttribName, Load, Simulate, Convert2Float, RegressionType = get_AttribInfo(DirName)

    if len(This_AttribNames) == 0:
        for ida, compName in enumerate(StatsCompName):
            if compName == thisCompName:
                This_AttribNames.append(AttribName[ida])
                This_Convert2Float.append(Convert2Float[ida])
                This_RegType.append(RegressionType[ida])
                This_Simulate.append(float(Simulate[ida]))
                # Default setting of Load to 1 if simulation is required
                if float(Simulate[ida]) == 1:
                    This_Load.append(1)
                # Otherwise get load value from CSV file
                else:
                    This_Load.append(Load[ida])

    df_actionMeta.RegType = This_RegType
    df_actionMeta.Convert2Float = This_Convert2Float
    df_actionMeta.AttribNames = This_AttribNames
    df_actionMeta.Simulate = This_Simulate

    # =======================================
    # 2. Data 2 DataFrame
    # =======================================
    df = pd.DataFrame()
    if not ApplyLoad:
        # converting Netz data into data frame
        for attribName in df_actionMeta.AttribNames:
            wert = Netz.get_Attrib(thisCompName, attribName)
            N = len(wert)
            NumNaN = 0
            if len(wert) == 0:
                for ii in range(df.shape[0]):
                    wert.append(np.nan)
                    NumNaN = NumNaN + 1
            else:
                for idy, ww in enumerate(wert):
                    if ww is None:
                        wert[idy] = np.nan
                        NumNaN = NumNaN + 1
            if N > NumNaN:
                df[attribName] = np.array(wert)
    else:
        # converting Netz data into data frame
        Th_Convert2Float = []
        Th_RegType = []
        Th_Simulate = []
        Th_AttribNames = []
        for idx, attribName in enumerate(df_actionMeta.AttribNames):
            if This_Load[idx] == 1:
                wert = Netz.get_Attrib(thisCompName, attribName)
                N = len(wert)
                NumNaN = 0
                if len(wert) == 0:
                    for ii in range(df.shape[0]):
                        wert.append(np.nan)
                        NumNaN = NumNaN + 1
                else:
                    for idy, ww in enumerate(wert):
                        if ww is None:
                            wert[idy] = np.nan
                            NumNaN = NumNaN + 1
                # Add only if there is data
                if N > NumNaN:
                    df[attribName] = np.array(wert)
                    Th_AttribNames.append(df_actionMeta.AttribNames[idx])
                    Th_Convert2Float.append(df_actionMeta.Convert2Float[idx])
                    Th_RegType.append(df_actionMeta.RegType[idx])
                    Th_Simulate.append(df_actionMeta.Simulate[idx])

        df_actionMeta.AttribNames = Th_AttribNames
        df_actionMeta.Convert2Float = Th_Convert2Float
        df_actionMeta.RegType = Th_RegType
        df_actionMeta.Simulate = Th_Simulate

    # now adjusting "df_actionMeta" so that "AttribNames" only contains the
    # attributes that shall be simulated
    Th_AttribNames = []
    Th_RegType = []
    Th_Convert2Float = []
    Th_Simulate = []
    for idx, sim in enumerate(df_actionMeta.Simulate):
        if sim == 1:
            Th_AttribNames.append(df_actionMeta.AttribNames[idx])
            Th_Convert2Float.append(df_actionMeta.Convert2Float[idx])
            Th_RegType.append(df_actionMeta.RegType[idx])
            Th_Simulate.append(df_actionMeta.Simulate[idx])

    df_actionMeta.AttribNames = Th_AttribNames
    df_actionMeta.Convert2Float = Th_Convert2Float
    df_actionMeta.RegType = Th_RegType
    df_actionMeta.Simulate = Th_Simulate

    return df, df_actionMeta
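# The layout of StatsAttribSettings.csv is defined by get_AttribInfo() (not
# shown in this section); from the unpacking above it must provide, per row,
# the component name, attribute name, Load, Simulate, Convert2Float and
# RegressionType fields. An illustrative (hypothetical) extract:
#
# StatsCompName;AttribName;Load;Simulate;Convert2Float;RegressionType
# Storages;max_workingGas_TWh;1;1;1;linear
# Storages;start_year;1;0;1;none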
def read_PipeLines(NumDataSets=1e+100, RelDirName='Eingabe/InternetDaten/'):
    """ Reading of pipeline information from CSV file. Number of pipelines to
    read given with **NumDataSets**, and location of relative path folder is
    **RelDirName**

    \n.. comments:
    Input:
        NumDataSets:     Maximum number of elements to be read
                         (default = 1e+100)
        RelDirName:      String containing relative directory name
                         (default = 'Eingabe/InternetDaten/')
    Return:
        PipeLines:       PipeLines component
    """
    # Initialization of variables
    id = []
    name = []
    node_id = []
    meta_id = []
    source_id = []
    PipeLines = []
    dataFolder = Path.cwd()
    filename = dataFolder / RelDirName

    # Opening file and reading header lines
    FileName = str(filename / 'Loc_PipePoints.csv')
    if os.path.exists(FileName):
        fid = open(FileName, 'r', encoding="utf-8")
        for ii in list(range(1 + 2)):
            fid.readline()
        # reading with CSV
        csv_reader = csv.reader(fid, delimiter=";")
        for row in csv_reader:
            id.append(row[0])
            source_id.append(''.join([ID_Add, str(row[0])]))
            name.append(row[1])
            node_id.append(row[2])
            meta_id.append(row[3])
        # closing the CSV file
        fid.close()

        # Initialization of variables
        countLeitung = 0
        countLine = 0
        MaxNum = len(name)

        # Note: max_pressure_bar (or pressure) does not belong here
        while countLine < MaxNum:
            PipeLines.append(K_Component.PipeLines(id=None, name='', node_id=[],
                                                   country_code=None, source_id=[],
                                                   lat=None, long=None))
            dieserLeitungsName = name[countLine]      # pipeline name
            dieserPunktName = node_id[countLine]      # node name
            dieserMet_id = meta_id[countLine]
            dieserid = id[countLine]
            dieserSource_id = source_id[countLine]

            PipeLines[countLeitung].id = dieserid
            PipeLines[countLeitung].name = dieserLeitungsName
            PipeLines[countLeitung].node_id = [dieserPunktName]
            PipeLines[countLeitung].source_id = [dieserSource_id]
            PipeLines[countLeitung].param['meta_id'] = dieserMet_id

            # Create remaining list of pipeline names
            allLeitungsNames = name[countLine + 1:]
            pos = M_FindPos.find_pos_StringInList(dieserLeitungsName, allLeitungsNames)
            if len(pos) == 1:
                dieserPunktName = node_id[countLine + 1 + pos[0]]
                PipeLines[countLeitung].node_id.append(dieserPunktName)
            elif len(pos) > 1:
                dieserPunktName = node_id[countLine + 1 + pos[len(pos) - 1]]
                pos = pos[0:len(pos) - 1]
                for p in pos:
                    PipeLines[countLeitung].node_id.append(node_id[countLine + 1 + p])
                PipeLines[countLeitung].node_id.append(dieserPunktName)
                pos.append(0)
            else:
                print('Pipeline record defective: ' + dieserLeitungsName)

            countLeitung = countLeitung + 1
            countLine = countLine + 1 + len(pos)
            # stop early once the user-requested number of data sets is reached
            if countLeitung > NumDataSets:
                return PipeLines

    return PipeLines
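# Reader contract implied above: Loc_PipePoints.csv has three header lines,
# is ';'-delimited, and each data row holds id;name;node_id;meta_id, where
# rows sharing the same pipeline name form the node sequence of one pipeline.
# Minimal call with the default relative directory (illustrative only):
#
# PipeLines = read_PipeLines(NumDataSets = 100,
#                            RelDirName = 'Eingabe/InternetDaten/')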
def changePipeSegments(Netz, RelDirName = 'LKD_NodeChanges.csv'):
    """Changes PipeSegments based on the input CSV file **RelDirName**: pipes
    flagged with NodeNew == 'None' are removed; otherwise the wrong end node
    of the pipe is replaced with a newly created node.
    """
    if os.path.exists(RelDirName):
        fid = open(RelDirName, 'r', encoding="utf-8", errors = "ignore")
        # Read header line
        fid.readline()
        csv_reader = csv.reader(fid, delimiter = ";")
        InPipeIds = Netz.get_Attrib(compName = 'PipeSegments', attribName = 'id')
        for row in csv_reader:
            # Getting pipe from CSV file
            PipeID = str(row[0])
            #NodeCorrect = row[1]
            NodeWrong = row[2]
            NodeNew = row[3]
            lat = float(row[4])
            long = float(row[5])
            cc = row[6]
            # getting corresponding pipeSegment from LKD data set
            pos = M_FindPos.find_pos_StringInList(String = PipeID, ListOfStrings = InPipeIds)
            if len(pos) == 1:
                if NodeNew == 'None':
                    # Removing pipe
                    Netz.PipeSegments[pos[0]].id = '-9999'
                elif Netz.PipeSegments[pos[0]].node_id[0] == NodeWrong:
                    # PipeSegment "from" node
                    Netz.PipeSegments[pos[0]].node_id[0] = NodeNew
                    Netz.PipeSegments[pos[0]].lat[0] = lat
                    Netz.PipeSegments[pos[0]].long[0] = long
                    Netz.PipeSegments[pos[0]].country_code[0] = cc
                    Netz.PipeSegments[pos[0]].param['length'] = M_Projection.LatLong2DistanceValue(lat, long,
                                Netz.PipeSegments[pos[0]].lat[-1], Netz.PipeSegments[pos[0]].long[-1])
                    # Node
                    Netz.Nodes.append(K_Component.Nodes(id = NodeNew, name = NodeNew,
                                source_id = ['LKD_' + PipeID], node_id = ['N_' + NodeNew],
                                country_code = cc, lat = lat, long = long,
                                param = {'comp_units': 0, 'operator_name': None,
                                         'is_import': 0, 'is_export': 0,
                                         'H_L_conver': 0, 'operator_Z': None,
                                         'compressor': [], 'entsog_key': None,
                                         'is_crossBorder': 0, 'ugs': 0,
                                         'production': 0, 'exact': 2,
                                         'license': 'open data'}))
                elif Netz.PipeSegments[pos[0]].node_id[1] == NodeWrong:
                    # PipeSegment "to" node
                    Netz.PipeSegments[pos[0]].node_id[1] = NodeNew
                    Netz.PipeSegments[pos[0]].lat[-1] = lat
                    Netz.PipeSegments[pos[0]].long[-1] = long
                    Netz.PipeSegments[pos[0]].country_code[-1] = cc
                    Netz.PipeSegments[pos[0]].param['length'] = M_Projection.LatLong2DistanceValue(
                                Netz.PipeSegments[pos[0]].lat[0], Netz.PipeSegments[pos[0]].long[0], lat, long)
                    # Node
                    Netz.Nodes.append(K_Component.Nodes(id = NodeNew, name = NodeNew,
                                source_id = ['LKD_' + PipeID], node_id = ['N_' + NodeNew],
                                country_code = cc, lat = lat, long = long,
                                param = {'comp_units': 0, 'operator_name': None,
                                         'is_import': 0, 'is_export': 0,
                                         'H_L_conver': 0, 'operator_Z': None,
                                         'compressor': [], 'entsog_key': None,
                                         'is_crossBorder': 0, 'ugs': 0,
                                         'production': 0, 'exact': 2,
                                         'license': 'open data'}))
                else:
                    print('M_LKD.changePipeSegments: PipeSegment ' + PipeID + ': neither end node matches ' + NodeWrong)
            else:
                print('M_LKD.changePipeSegments: PipeSegment id ' + PipeID + ' not found exactly once')
        fid.close()

    Netz.select_byAttrib(['PipeSegments'], 'id', '-9999', '!=')

    return Netz
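# Shape of the change file implied by the reader above: one header line,
# ';'-delimited, seven columns, with NodeCorrect (column 2) read but unused.
# Illustrative row (values hypothetical):
#
# PipeID;NodeCorrect;NodeWrong;NodeNew;lat;long;country_code
# 4711;N_123;N_999;N_456;52.52;13.40;DE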
def read_component(DataType='', NumDataSets=1e+100, RelDirName=None):
    """ Reads Norwegian non-infield pipelines from shape files. **RelDirName**
    supplies the relative location of the shape files, whereas **DataType**
    specifies which component is to be read in, with options 'PipeLines'
    and 'Nodes'

    \n.. comments:
    Input:
        DataType:        String, specifying the component to be read in
                         (default = '')
        NumDataSets:     Number, indicating the maximum number of elements to
                         be read in (default = 1e+100).
        RelDirName:      String, containing the relative path name of where
                         data will be loaded from (default = None)
    Return:
        ReturnComponent: list of PipeLines or Nodes elements
    """
    # init variable to return and counter
    ReturnComponent = []
    count = 0
    # start and target coordinate systems
    inCoord = 'epsg:4230'
    outCoord = 'epsg:4326'

    # Path to shapefile
    FileName_Map = os.path.join(RelDirName, 'pipLine.shp')
    # Read in shapefile
    Shapes = shapefile.Reader(FileName_Map)

    if DataType == 'PipeLines':
        # go through every pipeline stored in shapefile
        for shape in Shapes.shapeRecords():
            # only read out gas pipelines
            if 'Gas' == shape.record[11]:
                # Getting the coordinates of the PipeSegment
                parts = sorted(shape.shape.parts)
                # Joining X and Y coordinates from shape.shape.points
                vec = shape.shape.points
                polyLine = K_Component.PolyLine(lat=[], long=[])
                for x, y in vec:
                    polyLine.long.append(x)
                    polyLine.lat.append(y)
                # check if coordinates exist
                if len(polyLine.long) and len(polyLine.lat):
                    # Converting to LatLong
                    polyLine = M_Projection.XY2LatLong(polyLine, inCoord, outCoord)
                    # Generation of PipeLine
                    PipeLine = M_Shape.PolyLine2PipeLines(polyLine, parts,
                                                          source=C_Code,
                                                          country_code=C_Code)
                    for ii in range(len(PipeLine)):
                        PipeLine[ii].id = 'N_' + str(count)
                        PipeLine[ii].source_id = [C_Code + '_' + str(count)]
                        PipeLine[ii].name = shape.record[1]
                        PipeLine[ii].node_id = ['N_' + str(count * 2), 'N_' + str(count * 2 + 1)]
                        PipeLine[ii].param.update({'lat_mean': M_MatLab.get_mean(PipeLine[ii].lat)[0]})
                        PipeLine[ii].param.update({'long_mean': M_MatLab.get_mean(PipeLine[ii].long)[0]})
                        # convert inches to mm
                        PipeLine[ii].param.update({'diameter_mm': convInchToMm(shape.record[13])})
                        count = count + 1

                    ReturnComponent.extend(PipeLine)
                    if count > NumDataSets:
                        return ReturnComponent

    elif DataType == 'Nodes':
        # go through every pipeline stored in shapefile
        for shape in Shapes.shapeRecords():
            # Only read out nodes of gas pipelines
            if 'Gas' == shape.record[11]:
                # Getting the coordinates of the PipeSegment
                parts = sorted(shape.shape.parts)
                # Joining X and Y coordinates from shape.shape.points
                vec = shape.shape.points
                polyLine = K_Component.PolyLine(lat=[], long=[])
                for x, y in vec:
                    polyLine.long.append(x)
                    polyLine.lat.append(y)
                # check if coordinates exist
                if len(polyLine.long) and len(polyLine.lat):
                    # Converting to LatLong
                    polyLine = M_Projection.XY2LatLong(polyLine, inCoord, outCoord)
                    # Generation of PipeSegments
                    Segments = M_Shape.PolyLine2PipeSegment(polyLine, parts,
                                                            source=C_Code,
                                                            country_code=C_Code)
                    # Generation of the Nodes from PipeSegments,
                    # two Nodes per PipeSegment
                    for seg in Segments:
                        # start node of the segment
                        id = 'N_' + str(len(ReturnComponent))
                        name = 'N_' + str(len(ReturnComponent))
                        node_id = [id]
                        source_id = [C_Code + '_' + str(len(ReturnComponent))]
                        country_code = C_Code
                        lat = seg.lat[0]
                        long = seg.long[0]
                        ReturnComponent.append(K_Component.Nodes(id=id, node_id=node_id,
                                    name=name, lat=lat, long=long, source_id=source_id,
                                    country_code=country_code, param={}))
                        # end node of the segment
                        id = 'N_' + str(len(ReturnComponent))
                        name = 'N_' + str(len(ReturnComponent))
                        node_id = [id]
                        source_id = [C_Code + '_' + str(len(ReturnComponent))]
                        country_code = C_Code
                        lat = seg.lat[1]
                        long = seg.long[1]
                        ReturnComponent.append(K_Component.Nodes(id=id, node_id=node_id,
                                    name=name, lat=lat, long=long, country_code=country_code,
                                    source_id=source_id, param={}))
                        count = count + 1
                        # Terminate once exceeding user request
                        if count > NumDataSets:
                            return ReturnComponent

    return ReturnComponent
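# convInchToMm() is called above but not defined in this section; a minimal
# sketch of the conversion it presumably performs (1 inch = 25.4 mm), with a
# guard for non-numeric shapefile records, could look like this:

def convInchToMm_sketch(valInch):
    """Convert a diameter in inches to millimetres (hypothetical sketch)."""
    try:
        return float(valInch) * 25.4
    except (TypeError, ValueError):
        return float('nan')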
def gen_component(dataType, NodesIn):
    """ Generates a netz component from existing components of this netz,
    e.g. generation of a Compressors list from Nodes. Data supplied via
    **NodesIn**. Component name to be generated supplied as string
    **dataType**, with current options implemented: *Compressors*.

    \n.. comments:
    Input:
        dataType:        string containing name of component to be created
                         e.g. 'Compressors'
        NodesIn:         list of Nodes elements
    Return:
        ReturnComponent: component list.
    """
    ReturnComponent = []

    if dataType == 'Compressors':
        for seg in NodesIn:
            if float(seg.param['comp_units']):
                if float(seg.param['comp_units']) > 0:
                    id = str(seg.id)
                    source_id = [ID_Add + str(id)]
                    node_id = [str(seg.id)]
                    name = str(seg.name)
                    lat = seg.lat
                    long = seg.long
                    country_code = seg.country_code
                    # Param values
                    operator_name = seg.param['operator_name']
                    license = seg.param['license']
                    num_turb = seg.param['comp_units']
                    entsog_key = seg.param['entsog_key']
                    ReturnComponent.append(K_Component.Compressors(id = id, name = name,
                                source_id = source_id, country_code = country_code,
                                node_id = node_id, lat = lat, long = long,
                                param = {'operator_name': operator_name,
                                         'num_turb': num_turb,
                                         'entsog_key': entsog_key,
                                         'license': license}))
            elif ((len(seg.param['compressor']) > 0)
                    and ('Regelanlage' not in seg.param['compressor'])
                    and ('NULL' not in seg.param['compressor'])
                    and ('VErdichter' in seg.param['compressor'])):
                id = str(seg.id)
                source_id = [ID_Add + str(id)]
                node_id = [str(seg.id)]
                name = str(seg.name)
                lat = seg.lat
                long = seg.long
                country_code = seg.country_code
                # Param values
                operator_name = seg.param['operator_name']
                license = seg.param['license']
                num_turb = seg.param['comp_units']
                entsog_key = seg.param['entsog_key']
                ReturnComponent.append(K_Component.Compressors(id = id, name = name,
                            source_id = source_id, country_code = country_code,
                            node_id = node_id, lat = lat, long = long,
                            param = {'operator_name': operator_name,
                                     'num_turb': num_turb,
                                     'entsog_key': entsog_key,
                                     'license': license}))

    return ReturnComponent
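# Usage sketch (hypothetical): derive the Compressors component from an
# already populated Nodes list, assuming each node's param dict carries the
# 'comp_units', 'compressor', 'operator_name', 'entsog_key' and 'license'
# keys tested above:
#
# Netz.Compressors = gen_component('Compressors', Netz.Nodes)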
def Graph2Netz(G_Set_Sum):
    """ Creation of a Netz from a networkx network

    \n.. comments:
    Input:
        G_Set_Sum        Network of type networkx
    Return:
        G_Netz           Netz of type K_Netze.NetComp
    """
    G_Netz = K_Netze.NetComp()
    Pipe = []
    Nodes = []

    # one Nodes element per graph node
    # (note: G.node[...] is the legacy networkx (< 2.4) attribute access)
    for node in G_Set_Sum.nodes():
        id = G_Set_Sum.node[node]['id'][0]
        lat = G_Set_Sum.node[node]['pos'][1]
        long = G_Set_Sum.node[node]['pos'][0]
        country_code = getAttrib(G_Set_Sum.node[node], 'country_code')
        param = getAttrib(G_Set_Sum.node[node], 'param', 'param')
        source_id = getAttrib(G_Set_Sum.node[node], 'source_id', id)
        node_id = getAttrib(G_Set_Sum.node[node], 'node_id', id)
        name = getAttrib(G_Set_Sum.node[node], 'name', id)

        Nodes.append(K_Component.Nodes(id=id, name=name, source_id=source_id,
                        node_id=node_id, long=long, lat=lat,
                        country_code=country_code, param=param))
    G_Netz.Nodes = Nodes

    # one PipeSegments element per (multi-)edge, with end coordinates and
    # country codes taken from the two end nodes
    for edge in G_Set_Sum.edges():
        for xx in range(len(G_Set_Sum[edge[0]][edge[1]])):
            id = G_Set_Sum[edge[0]][edge[1]][xx]['id'][0]
            latS = G_Set_Sum.node[edge[0]]['pos'][1]
            longS = G_Set_Sum.node[edge[0]]['pos'][0]
            latE = G_Set_Sum.node[edge[1]]['pos'][1]
            longE = G_Set_Sum.node[edge[1]]['pos'][0]
            country_codeS = G_Set_Sum.node[edge[0]]['country_code']
            country_codeE = G_Set_Sum.node[edge[1]]['country_code']
            param = getAttrib(G_Set_Sum[edge[0]][edge[1]][xx], 'param', 'param')
            source_id = getAttrib(G_Set_Sum[edge[0]][edge[1]][xx], 'source_id', id)
            node_id = [str(edge[0]), str(edge[1])]
            name = getAttrib(G_Set_Sum[edge[0]][edge[1]][xx], 'name', id)

            Pipe.append(K_Component.PipeSegments(id=id, name=name,
                            source_id=source_id, node_id=node_id,
                            long=[longS, longE], lat=[latS, latE],
                            country_code=[country_codeS, country_codeE]))
    G_Netz.PipeSegments = Pipe

    return G_Netz
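# getAttrib() is used above but not shown in this section; a minimal sketch
# of the fallback behaviour it appears to provide (return the stored graph
# attribute if present, else a default; the real helper may differ):

def getAttrib_sketch(attribDict, key, default=None):
    """Fetch key from a graph attribute dict with a fallback (sketch)."""
    if key in attribDict and attribDict[key] is not None:
        return attribDict[key]
    # 'param' appears to be used as a marker for "empty param dict" above
    return {} if default == 'param' else default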