def read_component(DataType='', NumDataSets=1e+100, RelDirName=None):
    """ Method of reading in LKD components from shape files. **RelDirName** supplies the relative
    location of the shape files, whereas **DataType** specifies which component is to be read in,
    with options 'PipeSegments', 'Nodes', 'Storages', and 'Productions'.

    \n.. comments:
    Input:
        DataType:        string, specifying which component to read in
        NumDataSets:     (Optional = 1e+100) maximum number of data sets to be read in
        RelDirName:      string, containing the relative path name of where data will be loaded from
                         Default = 'Eingabe/LKD/'
    Return:
        ReturnComponent: list of elements of a single component type.
    """
    ReturnComponent = []
    inCoord = 'epsg:31468'
    outCoord = 'epsg:4326'
    count = 0

    if DataType in 'PipeSegments':
        # start = time.time()
        FileName_Shape = str(RelDirName / 'pipelines_utf8.shp')
        # Loading from shape file
        Shapes = shapefile.Reader(FileName_Shape, encoding="utf8")
        # Drawing the map of Europe
        # print('there are pipesegments: ' + str(len(Shapes.shapeRecords())))
        for shape in Shapes.shapeRecords():
            # Getting PolyLine
            parts = sorted(shape.shape.parts)
            # Joining X and Y coordinates from Shape.shape.points
            vec = shape.shape.points
            polyLine = K_Component.PolyLine(lat=[], long=[])
            for x, y in vec:
                polyLine.long.append(x)
                polyLine.lat.append(y)
            # Converting to LatLong
            polyLine = M_Projection.XY2LatLong(polyLine, inCoord, outCoord)
            # Generation of PipeLine
            PipeLine = M_Shape.PolyLine2PipeLines(polyLine, parts, source=C_Code, country_code=C_Code)
            lat = PipeLine[0].lat
            long = PipeLine[0].long

            # Getting meta data
            id = str(shape.record[0])
            source_id = [ID_Add + str(id)]
            name = replaceString(shape.record[1])
            if len(name) == 0:
                name = 'PS_' + str(id)
            # Converting gas_type to boolean
            is_H_gas = shape.record[2]
            if is_H_gas == 'L':
                is_H_gas = 0
            elif is_H_gas == 'H':
                is_H_gas = 1
            length = float(shape.record[3]) / 1000
            pipe_class_type = shape.record[6]
            if pipe_class_type == '':
                pipe_class_type = None
            # is_virtualPipe
            is_virtualPipe = False
            if len(shape.record[4]) > 0:
                if shape.record[4] == 1:
                    is_virtualPipe = True
            # diameter_mm
            if len(shape.record[5]) > 0:
                if 'NULL' == shape.record[5]:
                    diameter_mm = float('nan')
                else:
                    diameter_mm = float(shape.record[5])
            else:
                diameter_mm = float('nan')
            # max_pressure_bar
            if shape.record[7] == None:
                max_pressure_bar = float('nan')
            elif type(shape.record[7]) == int:
                max_pressure_bar = float(shape.record[7])
                if max_pressure_bar > 200:
                    max_pressure_bar = float('nan')
            elif len(shape.record[7]) > 0:
                if 'NULL' == shape.record[7]:
                    max_pressure_bar = float('nan')
                else:
                    max_pressure_bar = float(shape.record[7])
                    if max_pressure_bar > 200:
                        max_pressure_bar = float('nan')
            else:
                max_pressure_bar = float('nan')

            diam_est = shape.record[8]
            class_est = shape.record[9]
            press_est = shape.record[10]
            if isinstance(diam_est, str):
                if diam_est == 'NULL':
                    diam_est = float('nan')
                    diam_est_method = 'raw'
                    diam_est_uncertainty = 0
                else:
                    diam_est = diam_est
                    diam_est_method = 'raw'
                    diam_est_uncertainty = 0
            else:
                if diam_est == 1:
                    diam_est_method = 'estimated'
                    diam_est_uncertainty = 1
                else:
                    diam_est_method = 'raw'
                    diam_est_uncertainty = 0
            if isinstance(class_est, str):
                if class_est == 'NULL':
                    class_est = float('nan')
                    class_est_method = 'raw'
                    class_est_uncertainty = 0
                else:
                    class_est = class_est
                    class_est_method = 'raw'
                    class_est_uncertainty = 0
            else:
                if class_est == 1:
                    class_est_method = 'estimated'
                    class_est_uncertainty = 1
                else:
                    class_est_method = 'raw'
                    class_est_uncertainty = 0
            if isinstance(press_est, str):
                if press_est == 'NULL':
                    press_est = float('nan')
                    press_est_method = 'raw'
                    press_est_uncertainty = 0
                else:
                    press_est_method = 'raw'
                    press_est_uncertainty = 0
            else:
                if press_est == 1:
                    press_est_method = 'estimated'
                    press_est_uncertainty = 1
                else:
                    press_est_method = 'raw'
                    press_est_uncertainty = 0
            # if isinstance(class_est, str):
            #     if class_est == 'NULL':
            #         class_est = float('nan')
            # if isinstance(press_est, str):
            #     if press_est == 'NULL':
            #         press_est = float('nan')

            max_cap_GWh_per_d = shape.record[11]
            operator_name = str(shape.record[12])
            node_id = ['N_' + str(shape.record[13]), 'N_' + str(shape.record[14])]
            if 'N_809066' in node_id and 'N_809063' in node_id:
                if node_id[0] == 'N_809066':
                    node_id[0] = 'N_809076'
                else:
                    node_id[1] = 'N_809076'
            if 'N_809066' in node_id and 'N_1000001' in node_id:
                if node_id[0] == 'N_809066':
                    node_id[0] = 'N_809076'
                else:
                    node_id[1] = 'N_809076'
            if 'N_809065' in node_id and 'N_809025' in node_id:
                if node_id[0] == 'N_809065':
                    node_id[0] = 'N_809075'
                else:
                    node_id[1] = 'N_809075'
            if 'N_809065' in node_id and 'N_1000001' in node_id:
                if node_id[0] == 'N_809065':
                    node_id[0] = 'N_809075'
                else:
                    node_id[1] = 'N_809075'
            if 'N_809064' in node_id and 'N_809026' in node_id:
                if node_id[0] == 'N_809064':
                    node_id[0] = 'N_809074'
                else:
                    node_id[1] = 'N_809074'
            if 'N_809064' in node_id and 'N_1000001' in node_id:
                if node_id[0] == 'N_809064':
                    node_id[0] = 'N_809074'
                else:
                    node_id[1] = 'N_809074'
            country_code = ['DE', 'DE']

            if is_virtualPipe == False:
                ReturnComponent.append(K_Component.PipeSegments(
                    id=id,
                    name=name,
                    lat=lat,
                    long=long,
                    country_code=country_code,
                    node_id=node_id,
                    source_id=source_id,
                    param={'max_pressure_bar': max_pressure_bar,
                           'is_H_gas': is_H_gas,
                           'length': length,
                           'diameter_mm': diameter_mm,
                           'pipe_class_type': pipe_class_type,
                           'max_cap_GWh_per_d': max_cap_GWh_per_d,
                           'operator_name': operator_name},
                    method={'diameter_mm': diam_est_method,
                            'pipe_class_type': class_est_method,
                            'max_pressure_bar': press_est_method},
                    uncertainty={'diameter_mm': diam_est_uncertainty,
                                 'pipe_class_type': class_est_uncertainty,
                                 'max_pressure_bar': press_est_uncertainty}))
                count = count + 1
                if count > NumDataSets:
                    return ReturnComponent

    elif DataType in 'Nodes':
        inCoord = 'epsg:31468'
        outCoord = 'epsg:4326'
        FileName_Shape = str(RelDirName / 'nodes_utf8.shp')
        # Loading from shape file
        Shapes = shapefile.Reader(FileName_Shape, encoding="utf8")
        # Drawing the map of Europe
        for shape in Shapes.shapeRecords():
            id = 'N_' + shape.record[0]
            source_id = [ID_Add + str(shape.record[0])]
            name = replaceString(shape.record[1])
            operator_name = str(shape.record[2])
            is_import = shape.record[3]
            is_export = shape.record[4]
            H_L_conver = int(shape.record[5])
            operator_Z = shape.record[6]
            compressor = shape.record[7]
            compUnit = shape.record[8]
            if 'NULL' in compUnit:
                compUnit = 0
            elif len(compUnit) == 0:
                compUnit = 0
            else:
                compUnit = float(compUnit)
            country_code = shape.record[12]
            X_coor = shape.record[13]
            Y_coor = shape.record[14]
            entsog_nam = str(shape.record[15])
            if len(entsog_nam) > 0:
                name = entsog_nam
            if name == '':
                name = 'Ort_' + str(id)
            entsog_key = shape.record[16]
            if entsog_key == '':
                entsog_key = None
            is_crossBorder = shape.record[17]
            ugs = shape.record[19]
            production = shape.record[20]
            exact = 1
            license = 'open data'

            Line = K_Component.PolyLine(lat=Y_coor, long=X_coor)
            Line = M_Projection.XY2LatLong(Line, inCoord, outCoord)
            lat = Line.lat
            long = Line.long

            if id == 'N_809066' and country_code == 'AT':
                id = 'N_809076'
            elif id == 'N_809065' and country_code == 'AT':
                id = 'N_809075'
            elif id == 'N_809064' and country_code == 'AT':
                id = 'N_809074'

            ReturnComponent.append(K_Component.Nodes(
                id=id,
                node_id=[id],
                name=name,
                source_id=source_id,
                long=long,
                lat=lat,
                country_code=country_code,
                param={'exact': exact,
                       'H_L_conver': H_L_conver,
                       'operator_Z': operator_Z,
                       'compressor': compressor,
                       'comp_units': compUnit,
                       'entsog_key': entsog_key,
                       'is_crossBorder': is_crossBorder,
                       'ugs': ugs,
                       'production': production,
                       'operator_name': operator_name,
                       'is_import': is_import,
                       'is_export': is_export,
                       'license': license}))
            count = count + 1
            if count > NumDataSets:
                return ReturnComponent

    elif DataType in 'Storages':
        FileName_Shape = str(RelDirName / 'storages_utf8.shp')
        # Loading from shape file
        Shapes = shapefile.Reader(FileName_Shape, encoding="utf8")
        # Drawing the map of Europe
        for shape in Shapes.shapeRecords():
            id = 'N_' + shape.record[0]
            source_id = [ID_Add + str(shape.record[0])]
            name = replaceString(shape.record[1])
            operator_name = str(shape.record[2])
            entsog_nam = str(shape.record[9])
            if len(entsog_nam) > 0:
                name = entsog_nam
            entsog_key = shape.record[10]
            if entsog_key == '':
                entsog_key = None
            max_cap_pipe2store_GWh_per_d = shape.record[11]
            max_cap_store2pipe_GWh_per_d = shape.record[12]
            node_id = ['N_' + shape.record[0]]
            country_code = shape.record[6]

            ReturnComponent.append(K_Component.Storages(
                id=id,
                name=name,
                source_id=source_id,
                country_code=country_code,
                node_id=node_id,
                param={'operator_name': operator_name,
                       'entsog_key': entsog_key,
                       'max_cap_pipe2store_GWh_per_d': max_cap_pipe2store_GWh_per_d,
                       'max_cap_store2pipe_GWh_per_d': max_cap_store2pipe_GWh_per_d}))
            count = count + 1
            if count > NumDataSets:
                return ReturnComponent

    elif DataType in 'Productions':
        FileName_Shape = str(RelDirName / 'productions_utf8.shp')
        # Loading from shape file
        Shapes = shapefile.Reader(FileName_Shape, encoding="utf8")
        # Drawing the map of Europe
        for shape in Shapes.shapeRecords():
            id = 'N_' + shape.record[0]
            source_id = [ID_Add + str(shape.record[0])]
            name = replaceString(shape.record[1])
            operator_name = str(shape.record[2])
            entsog_nam = str(shape.record[9])
            if len(entsog_nam) > 0:
                name = entsog_nam
            entsog_key = shape.record[10]
            if entsog_key == '':
                entsog_key = None
            max_production = shape.record[11]
            node_id = ['N_' + shape.record[0]]
            country_code = shape.record[6]

            ReturnComponent.append(K_Component.Productions(
                id=id,
                name=name,
                source_id=source_id,
                node_id=node_id,
                country_code=country_code,
                param={'entsog_key': entsog_key,
                       'operator_name': operator_name,
                       'is_H_gas': 1,
                       'max_production_GWh_per_d': max_production}))
            count = count + 1
            if count > NumDataSets:
                return ReturnComponent

    return ReturnComponent
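# --- Usage sketch (illustrative, not part of the original module) ---------------------
# A minimal sketch of how the LKD reader above might be called. The directory
# 'Eingabe/LKD/' comes from the docstring default; the helper name _example_read_lkd()
# is hypothetical. RelDirName must be a pathlib.Path so the '/' operator used above works.
import pathlib

def _example_read_lkd():
    # Hypothetical example: read the first 10 nodes and all pipe segments.
    RelDirName = pathlib.Path('Eingabe/LKD/')
    Nodes = read_component(DataType='Nodes', NumDataSets=10, RelDirName=RelDirName)
    PipeSegments = read_component(DataType='PipeSegments', RelDirName=RelDirName)
    for node in Nodes:
        print(node.id, node.name, node.param['operator_name'])
    return Nodes, PipeSegments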
def read_component(DataType='LNGs', NumDataSets=100000, requeYear=[2000], DirName=None):
    """ Reading in GIE LNG and storage data sets from the API, with **NumDataSets** the maximum
    number of records to read, and **requeYear** the years for which to get data. **DirName** is
    the relative path name of the GIE meta data.

    \n.. comments:
    Input:
        DataType:     string, containing the data type to read; options are 'LNGs' or 'Storages'
        NumDataSets:  (Optional = 100000) number of data sets
        requeYear:    (Optional = [2000]) list of numbers containing the years [####] for which data is to be retrieved
        DirName:      string, containing the relative dir name of the GIE meta data
                      default = 'Eingabe/GIE/'
    Return:
        ReturnComponent: Instance of Component (list of single type elements)
    """
    # Dealing with private key
    ReturnComponent = []
    pathPrivKey = os.path.join(os.getcwd(), 'Eingabe/GIE/GIE_PrivateKey.txt')
    if os.path.isfile(pathPrivKey) is False:
        print('ERROR: M_GIE.read_component: you will need to get a private key from the GIE API.')
        print('Please see documentation for help.')
        print('No data will be loaded')
        return ReturnComponent
    PrivKey = M_Helfer.getLineFromFile(pathPrivKey)

    if 'LNGs' in DataType:
        # Initialization
        webCall_1 = 'https://alsi.gie.eu/api/data/'
        eic_code = ''
        count = 0
        filename = str(DirName / 'GIE_LNG.csv')
        print(' LNGs progress:')

        # Reading meta data from CSV file
        # Connecting to CSV file
        fid = open(filename, "r", encoding='iso-8859-15', errors='ignore')
        # Reading header line
        fid.readline()
        # Reading next line
        temp = M_Helfer.strip_accents(fid.readline()[:-1])
        while (len(temp) > 0) and (count < NumDataSets):
            typeval = temp.split(';')[1]
            if 'LSO' not in typeval:
                country_code = temp.split(';')[0]
                id = temp.split(';')[2]
                node_id = [id]
                source_id = [ID_Add + str(id)]
                facility_code = temp.split(';')[2]
                name = temp.split(';')[4]
                name_short = temp.split(';')[5]
                name_short = replaceString(name_short)
                ReturnComponent.append(K_Component.LNGs(
                    name=name,
                    id=id,
                    node_id=node_id,
                    source_id=source_id,
                    country_code=country_code,
                    lat=None,
                    long=None,
                    param={'facility_code': facility_code,
                           'name_short': name_short,
                           'eic_code': eic_code}))
                count = count + 1
            else:
                eic_code = temp.split(';')[2]
            # Reading next line
            temp = M_Helfer.strip_accents(fid.readline()[:-1])

        # Creation of a pool manager
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())

        # Reading, for all created LNG terminals, the data off the web page
        # maxSets = min([len(ReturnComponent), NumDataSets])
        maxSets = len(ReturnComponent)
        # for ii in range(96, 100):
        count = 0
        for ii in range(maxSets):
            # Initialization
            workingLNGVolume = []
            Store2PipeCap = []

            # Information from CSV file
            this_facility_code = ReturnComponent[ii].param['facility_code']
            this_country_code = ReturnComponent[ii].country_code
            this_eic_code = ReturnComponent[ii].param['eic_code']
            thisURL = webCall_1 + this_facility_code + '/' + this_country_code + '/' + this_eic_code

            # Get the data
            URLData = http.request('GET', thisURL, headers={'x-key': PrivKey})

            # Convert the data into dict
            tables = []
            try:
                tables = json.loads(URLData.data.decode('UTF-8'))
            except:
                print('ERROR: M_GIE.read_component(LNGs): reading URL failed')
                return []

            # Checking that results coming back are ok
            if tables.__contains__('error'):
                print('GIE load_Storages: something wrong while getting Storage data from GIE')  # , True)
                print(tables)
            # Data allowed to be parsed
            else:
                for tt in tables:
                    # Dissecting the input
                    for year in requeYear:
                        if (tt['dtmi'] != '-') and (str(year) in tt['gasDayStartedOn']):
                            workingLNGVolume.append(float(tt['dtmi']) * 1000)  # declared total maximum inventory [1000 m^3 LNG]
                            Store2PipeCap.append(float(tt['dtrs']))  # declared total reference sendout [GWh/d] (send-out capacity)

                # Remove wrong data points
                workingLNGVolume = M_Helfer.testData(workingLNGVolume, 'PercentAbsDiff', 4, 0)
                Store2PipeCap = M_Helfer.testData(Store2PipeCap, 'PercentAbsDiff', 4, 0)

                # Update screen with dot
                print('.', end='')

                # Deriving required values from time series
                ReturnComponent[ii].param.update({'max_workingLNG_M_m3': M_MatLab.get_median(workingLNGVolume)[0] / 1000000})
                ReturnComponent[ii].param.update({'median_cap_store2pipe_GWh_per_d': M_MatLab.get_median(Store2PipeCap)[0]})
                ReturnComponent[ii].param.update({'max_cap_store2pipe_GWh_per_d': M_MatLab.get_max(Store2PipeCap)[0]})
                count = count + 1
                if count > NumDataSets:
                    print(' ')
                    return ReturnComponent

    elif 'Storages' in DataType:
        # Initialization
        webCall_1 = 'https://agsi.gie.eu/api/data/'
        eic_code = ''
        count = 0
        print(' STORAGES progress:')
        filename = str(DirName / 'GIE_Storages.csv')

        # Reading meta data from CSV file
        # Connecting to CSV file
        fid = open(filename, "r", encoding="iso-8859-15", errors="surrogateescape")
        # Reading header line
        fid.readline()
        # Reading next line
        temp = M_Helfer.strip_accents(fid.readline()[:-1])
        while (len(temp) > 0) and (count < NumDataSets):
            typeval = temp.split(';')[1]
            if 'Storage Facility' in typeval:
                country_code = temp.split(';')[0]
                id = temp.split(';')[2]
                node_id = [id]
                source_id = [ID_Add + str(id)]
                facility_code = temp.split(';')[2]
                name = temp.split(';')[4]
                name_short = temp.split(';')[5]
                name_short = replaceString(name_short)
                name_short = name_short.replace(' ', '')
                name_short = name_short.strip()
                if 'OudeStatenzijl' in name_short:
                    country_code = 'NL'
                elif 'KinsaleSouthwest' in name_short:
                    country_code = 'IRL'
                ReturnComponent.append(K_Component.Storages(
                    name=name,
                    id=id,
                    node_id=node_id,
                    lat=None,
                    long=None,
                    source_id=source_id,
                    country_code=country_code,
                    param={'facility_code': facility_code,
                           'eic_code': eic_code,
                           'name_short': name_short}))
                count = count + 1
            else:
                eic_code = temp.split(';')[2]
            # Reading next line
            temp = M_Helfer.strip_accents(fid.readline()[:-1])

        # Creation of a pool manager
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())

        # Reading, for all created storages, the data off the web page
        maxSets = min([len(ReturnComponent), NumDataSets])
        count = 0
        keepPos = []
        for ii in range(maxSets):
            # Initialization
            max_workingGas_M_m3 = []
            Store2PipeCap = []
            Pipe2StoreCap1 = []

            # Information from CSV file
            this_facility_code = ReturnComponent[ii].param['facility_code']
            this_country_code = ReturnComponent[ii].country_code
            this_eic_code = ReturnComponent[ii].param['eic_code']
            thisURL = webCall_1 + this_facility_code + '/' + this_country_code + '/' + this_eic_code

            # Get the data
            URLData = http.request('GET', thisURL, headers={'x-key': PrivKey})

            # Convert the data into dict
            tables = []
            try:
                tables = json.loads(URLData.data.decode('UTF-8'))
                # Checking that results coming back are ok
                if tables.__contains__('error'):
                    print('GIE load_Storages: something wrong while getting Storage data from GIE', True)
                # Data allowed to be parsed
                else:
                    # print('len(tables[connectionpoints]) ' + str(len(tables['connectionpoints'])))
                    for tt in tables:
                        # Dissecting the input
                        for year in requeYear:
                            if (tt['gasInStorage'] != '-') and (str(year) in tt['gasDayStartedOn']):
                                max_workingGas_M_m3.append(float(tt['workingGasVolume']))
                                Store2PipeCap.append(float(tt['injectionCapacity']))
                                Pipe2StoreCap1.append(float(tt['withdrawalCapacity']))

                    # Remove wrong data sets
                    max_workingGas_M_m3 = M_Helfer.testData(max_workingGas_M_m3, 'PercentAbsDiff', 4, 0)
                    Store2PipeCap = M_Helfer.testData(Store2PipeCap, 'PercentAbsDiff', 4, 0)
                    Pipe2StoreCap = M_Helfer.testData(Pipe2StoreCap1, 'PercentAbsDiff', 4, 0)

                    # Deriving required values from time series
                    # wert, _ =
                    ReturnComponent[ii].param.update({'max_workingGas_M_m3': M_MatLab.get_max(max_workingGas_M_m3)[0]})
                    ReturnComponent[ii].param.update({'max_cap_store2pipe_GWh_per_d': M_MatLab.get_max(Store2PipeCap)[0]})
                    ReturnComponent[ii].param.update({'max_cap_pipe2store_GWh_per_d': M_MatLab.get_max(Pipe2StoreCap)[0]})

                    if math.isnan(ReturnComponent[ii].param['max_cap_pipe2store_GWh_per_d']):
                        ReturnComponent[ii].param['max_cap_pipe2store_GWh_per_d'] = None
                    if math.isnan(ReturnComponent[ii].param['max_cap_store2pipe_GWh_per_d']):
                        ReturnComponent[ii].param['max_cap_store2pipe_GWh_per_d'] = None
                    if math.isnan(ReturnComponent[ii].param['max_workingGas_M_m3']):
                        ReturnComponent[ii].param['max_workingGas_M_m3'] = None

                    # Update screen with dot
                    print('.', end='')
                    keepPos.append(ii)
                    count = count + 1
                    if count > NumDataSets:
                        # Dealing with bad elements that did not return any URL results
                        tempNetz = K_Netze.NetComp()
                        tempNetz.Storages = ReturnComponent
                        tempNetz.select_byPos('Storages', keepPos)
                        ReturnComponent = tempNetz.Storages
                        print(' ')
                        return ReturnComponent
            except:
                print('Warning: M_GIE.read_component(Storages): reading URL failed')
                print('    for ', thisURL)

        # Dealing with bad elements that did not return any URL results
        tempNetz = K_Netze.NetComp()
        tempNetz.Storages = ReturnComponent
        tempNetz.select_byPos('Storages', keepPos)
        ReturnComponent = tempNetz.Storages
        print(' ')

    return ReturnComponent
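# --- Usage sketch (illustrative, not part of the original module) ---------------------
# A minimal sketch of calling the GIE reader above. It assumes a valid API key is stored
# in 'Eingabe/GIE/GIE_PrivateKey.txt' and that the CSV meta data lives in 'Eingabe/GIE/';
# the helper name _example_read_gie() and the chosen year are hypothetical. DirName must
# be a pathlib.Path so the '/' operator used above works, and network access is required.
import pathlib

def _example_read_gie():
    DirName = pathlib.Path('Eingabe/GIE/')
    Storages = read_component(DataType='Storages', NumDataSets=20, requeYear=[2019], DirName=DirName)
    for store in Storages:
        print(store.id, store.country_code, store.param.get('max_workingGas_M_m3'))
    return Storages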
def read_component(DataType='Storages', NumDataSets=100000, requeYear=['2000']):
    """ Reading in IGU storage data sets from the IGU web pages, with **NumDataSets** the maximum
    number of records to read, and **requeYear** the years for which to get data.

    \n.. comments:
    Input:
        DataType:     string, containing the data type to read; currently only 'Storages' is supported (Default = 'Storages')
        NumDataSets:  number of data sets (Default = 100000)
        requeYear:    list of strings containing the years [####] for which data is to be retrieved (Default = ['2000'])
    Return:
        ReturnComponent: Data structure of IGU data.
    """
    ReturnComponent = []
    if 'Storages' in DataType:
        # Initialization
        webCall_1 = 'http://members.igu.org/html/wgc2003/WGC_pdffiles/data/Europe/att/UGS_'

        # Creation of a pool manager
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())

        # Reading, for all created storages, the data off the web page
        maxSets = min([169, NumDataSets])
        for nn in range(maxSets):
            time.sleep(0.001 + random.randint(1, 100) / 100000)
            # Information from CSV file
            thisURL = webCall_1 + str(nn) + '.html'
            # Get the data
            URLData = http.request('GET', thisURL)
            # Convert the data into dict
            try:
                if ('Application Error' not in str(URLData.data)
                        and 'Appliance Error' not in str(URLData.data)
                        and '404 Not Found</title>' not in str(URLData.data)):
                    soup = BeautifulSoup(URLData.data, 'html.parser')
                    ii = -0.5
                    for td in soup.find_all('td'):
                        if ii == 0:
                            id = td.font.string.replace('\n', '').strip()
                            source_id = ['IGU_' + str(id)]
                            node_id = [id]  # Is that correct??
                        elif ii == 1:
                            name = replaceString(td.font.string.replace('\n', '').strip())
                            id = name
                            node_id = [id]
                        elif ii == 2:
                            is_abandoned = td.font.string.replace('\n', '').strip()
                            if 'in operation' == is_abandoned:
                                is_abandoned = False
                            else:
                                is_abandoned = True
                        elif ii == 3:
                            country_code = M_Helfer.countryName2TwoLetter(td.font.string.replace('\n', '').strip())  # Germany
                        elif ii == 4:
                            store_type = td.font.string.replace('\n', '').strip()  # Oil/Gasfield
                        elif ii == 5:
                            operator_name = td.font.string.replace('\n', '').strip()  # BEB
                        elif ii == 6:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if '/' in wert:
                                wert = M_Helfer.string2floats(wert.replace('/', ' '))
                                start_year = wert[0]
                            elif 'Jan. ' in wert:
                                wert = wert.replace('Jan. ', '')
                                start_year = float(wert)
                            elif len(wert) > 0:
                                start_year = float(wert)  # 2001
                            else:
                                start_year = None  # 2001
                        elif ii == 7:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_workingGas_M_m3 = float(wert)
                            else:
                                max_workingGas_M_m3 = None  # [mill m³] 2025
                        elif ii == 8:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_cushionGas_M_m3 = float(wert)  # [mill m³] 2358
                            else:
                                max_cushionGas_M_m3 = None  # [mill m³] 2358
                        elif ii == 9:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_cap_store2pipe_M_m3_per_d = float(wert) / 1000 * 24
                            else:
                                max_cap_store2pipe_M_m3_per_d = None  # Peak withdrawal capacity [10³ m³/h] 840
                        elif ii == 10:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()  # Injection capacity [10³ m³/h] 810
                            if len(wert) > 0:
                                max_cap_pipe2store_M_m3_per_d = float(wert) / 1000 * 24
                            else:
                                max_cap_pipe2store_M_m3_per_d = None
                        elif ii == 11:
                            wert = td.font.string.replace('\n', '').strip()  # Storage formation, e.g. Solling sandstone middle Bunter
                            if wert == '---':
                                storage_formation = None
                            elif wert == '':
                                storage_formation = None
                            else:
                                storage_formation = wert
                        elif ii == 12:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                structure_depth_m = float(wert)  # Depth top structure, resp. cavern roof [m] 2650
                            else:
                                structure_depth_m = None  # Depth top structure, resp. cavern roof [m] 2650
                        elif ii == 13:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                min_storage_pressure_bphBar = float(wert)  # Min storage pressure [BHP bar] 90
                            else:
                                min_storage_pressure_bphBar = None
                        elif ii == 14:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_storage_pressure_bphBar = float(wert)  # Max allowable storage pressure [BHP bar] 460
                            else:
                                max_storage_pressure_bphBar = None
                        elif ii == 15:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if wert == '---':
                                net_thickness_m = None  # Net thickness [m] 22
                            elif '..' in wert:
                                wert = M_Helfer.string2floats(wert.replace('..', ' '))
                                net_thickness_m = sum(wert) / float(len(wert))
                            elif '/' in wert:
                                wert = M_Helfer.string2floats(wert.replace('/', ' '))
                                net_thickness_m = sum(wert) / float(len(wert))
                            elif ' - ' in wert:
                                wert = M_Helfer.string2floats(wert.replace(' - ', ' '))
                                net_thickness_m = sum(wert) / float(len(wert))
                            elif '-' in wert:
                                wert = M_Helfer.string2floats(wert.replace('-', ' '))
                                net_thickness_m = sum(wert) / float(len(wert))
                            elif len(wert) > 0:
                                net_thickness_m = float(wert)  # Net thickness [m] 22
                            else:
                                net_thickness_m = None  # Net thickness [m] 22
                        elif ii == 16:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()  # Porosity [%] 22
                            if wert == '---':
                                porosity_perc = None
                            elif len(wert) == 0:
                                porosity_perc = None
                            elif '(' in wert and ')' in wert:
                                wert = M_Helfer.string2floats(wert.replace('(', '').replace(')', '').replace(' - ', ' '))
                                porosity_perc = sum(wert) / float(len(wert))
                            elif ' - ' in wert:
                                wert = M_Helfer.string2floats(wert.replace(' - ', ' '))
                                porosity_perc = sum(wert) / float(len(wert))
                            elif '/' in wert:
                                wert = M_Helfer.string2floats(wert.replace('/', ' '))
                                porosity_perc = sum(wert) / float(len(wert))
                            elif ' -' in wert:
                                wert = wert.replace(' -', ' ')
                                if len(wert) > 1:
                                    wert = M_Helfer.string2floats(wert)
                                    porosity_perc = sum(wert) / float(len(wert))
                                else:
                                    porosity_perc = None
                            elif '-' in wert:
                                wert = wert.replace('-', ' ')
                                if len(wert) > 1:
                                    wert = M_Helfer.string2floats(wert)
                                    porosity_perc = sum(wert) / float(len(wert))
                                else:
                                    porosity_perc = None
                            else:
                                porosity_perc = wert
                        elif ii == 17:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip().replace(' mD', '')  # Permeability [mD] 10 - 1000 (500)
                            if wert == '---':
                                permeability_mD = None
                            elif len(wert) == 0:
                                permeability_mD = None
                            elif '(' in wert and ')' in wert:
                                wert = M_Helfer.string2floats(wert.replace('(', '').replace(')', '').replace(' - ', ' '))
                                permeability_mD = sum(wert) / float(len(wert))
                            elif ' - ' in wert:
                                wert = M_Helfer.string2floats(wert.replace(' - ', ' '))
                                permeability_mD = sum(wert) / float(len(wert))
                            elif '-' in wert:
                                wert = wert.replace('-', ' ')
                                if len(wert) > 1:
                                    wert = M_Helfer.string2floats(wert)
                                    permeability_mD = sum(wert) / float(len(wert))
                                else:
                                    permeability_mD = None
                            elif '/' in wert:
                                wert = M_Helfer.string2floats(wert.replace('/', ' '))
                                permeability_mD = sum(wert) / float(len(wert))
                            else:
                                permeability_mD = wert
                        elif ii == 18:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                num_storage_wells = int(wert)  # No of storage wells, resp. caverns 15
                            else:
                                num_storage_wells = None  # No of storage wells, resp. caverns 15
                        elif ii == 19:
                            wert = td.font.string.replace('\n', '').replace(',', '.').strip()
                            if len(wert) > 0:
                                max_power_MW = float(wert)  # Installed compressor power [MW]
                            else:
                                max_power_MW = None  # Installed compressor power [MW]
                        ii = ii + 0.5

                    if len(country_code) > 0 and is_abandoned == False:
                        # Creating component Storage
                        ReturnComponent.append(K_Component.Storages(
                            id=id,
                            name=name,
                            node_id=node_id,
                            source_id=source_id,
                            country_code=country_code,
                            lat=None,
                            long=None,
                            param={'store_type': store_type,
                                   'operator_name': operator_name,
                                   'start_year': start_year,
                                   'max_workingGas_M_m3': max_workingGas_M_m3,
                                   'max_cushionGas_M_m3': max_cushionGas_M_m3,
                                   'storage_formation': storage_formation,
                                   'structure_depth_m': structure_depth_m,
                                   'net_thickness_m': net_thickness_m,
                                   'porosity_perc': porosity_perc,
                                   'permeability_mD': permeability_mD,
                                   'num_storage_wells': num_storage_wells,
                                   'max_power_MW': max_power_MW,
                                   'max_cap_store2pipe_M_m3_per_d': max_cap_store2pipe_M_m3_per_d,
                                   'max_cap_pipe2store_M_m3_per_d': max_cap_pipe2store_M_m3_per_d,
                                   'min_storage_pressure_bphBar': min_storage_pressure_bphBar,
                                   'max_storage_pressure_bphBar': max_storage_pressure_bphBar}))
                        if len(ReturnComponent) == 7:
                            pass
            except:
                print(CC.Warning + 'Warning: M_IGU.read_component: reading URL failed' + CC.End)
                pass

    return ReturnComponent
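# --- Usage sketch (illustrative, not part of the original module) ---------------------
# A minimal sketch of calling the IGU reader above. The IGU pages are scraped live, so
# network access is required and results depend on the pages still being available; the
# helper name _example_read_igu() is hypothetical.
def _example_read_igu():
    Storages = read_component(DataType='Storages', NumDataSets=5)
    for store in Storages:
        print(store.id, store.country_code, store.param['store_type'],
              store.param['max_workingGas_M_m3'])
    return Storages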
def read_component(DataType='', NumDataSets=100000, requeYear='', RelDirName=None):
    """ Reading in GSE storage data sets from a CSV file, with **NumDataSets** the maximum number
    of records to read, and **requeYear** the year for which to get data. The relative path name
    of the CSV file location is **RelDirName**.

    \n.. comments:
    Input:
        DataType:     string, containing the data type to read; currently only 'Storages' is supported
        NumDataSets:  number of data sets to be read in (default = 100000)
        requeYear:    string containing the year [####] for which data is to be retrieved (default = '')
        RelDirName:   string, containing the dir name where the GSE meta data is located
                      (default = 'Eingabe/GSE/')
    Return:
        ReturnComponent: list of elements of a single component.
    """
    ReturnComponent = []
    if 'Storages' in DataType:
        # Initialization
        count = 0
        FileName = str(RelDirName / 'GSE_Storage.csv')

        # Reading meta data from CSV file
        # Connecting to CSV file
        FileEncoding = "ISO-8859-15"  # "utf8"
        fid = open(FileName, "r", encoding=FileEncoding, errors='ignore')
        # Reading header lines
        for ii in range(23):
            fid.readline()
        # Reading next line
        temp = M_Helfer.strip_accents(fid.readline()[:-1])
        while (len(temp) > 0) and (count < NumDataSets):
            Save = False
            country_code = temp.split(';')[2]
            operator_name = temp.split(';')[5]
            name = temp.split(';')[6]
            id = temp.split(';')[6]
            id = id.replace("'", ' ')
            node_id = [id]
            source_id = [ID_Add + str(id)]
            status = temp.split(';')[7]
            # start_year
            start_year = temp.split(';')[9]
            if len(start_year) == 0:
                start_year = None
                Save = True
            else:
                start_year = int(start_year)
                if requeYear == '':
                    Save = True
                elif start_year <= int(requeYear):
                    Save = True
            # max_workingGas_TWh
            max_workingGas_TWh = temp.split(';')[14]
            if len(max_workingGas_TWh) == 0:
                max_workingGas_TWh = None
            else:
                max_workingGas_TWh = float(max_workingGas_TWh)
            # max_cap_store2pipe_GWh_per_d
            max_cap_store2pipe_GWh_per_d = temp.split(';')[16]
            if len(max_cap_store2pipe_GWh_per_d) == 0:
                max_cap_store2pipe_GWh_per_d = None
            else:
                max_cap_store2pipe_GWh_per_d = float(max_cap_store2pipe_GWh_per_d)
            # max_cap_pipe2store_GWh_per_d
            max_cap_pipe2store_GWh_per_d = temp.split(';')[20]
            if len(max_cap_pipe2store_GWh_per_d) == 0:
                max_cap_pipe2store_GWh_per_d = None
            else:
                max_cap_pipe2store_GWh_per_d = float(max_cap_pipe2store_GWh_per_d)
            # is_inEU
            is_inEU = temp.split(';')[25]
            if is_inEU.lower() == 'yes':
                is_inEU = True
            else:
                is_inEU = False
            inEUMember = temp.split(';')[23]

            if 'y' == inEUMember and is_inEU == True and Save:
                name_short = name
                name_short = name_short.replace('SERENE Nord: ', '')
                name_short = name_short.replace('VGS SEDIANE B: ', '')
                name_short = name_short.replace('SERENE SUD', '')
                name_short = name_short.replace('SEDIANE LITTORAL:', '')
                name_short = name_short.replace('(XIV-XV)', '')
                name_short = name_short.replace('(Atwick)', '')
                name_short = name_short.replace('SEDIANE: ', '')
                name_short = name_short.replace('GSF ', '')
                name_short = name_short.replace('VGS ', '')
                name_short = name_short.replace('Eneco', '')
                name_short = name_short.replace('Uniper', '')
                name_short = name_short.replace('HGas', 'H')
                name_short = name_short.replace('LGas', 'L')
                name_short = name_short.replace('H-Gas', 'H')
                name_short = name_short.replace('L-Gas', 'L')
                name_short = name_short.replace('complex', '')
                name_short = name_short.replace('Trianel', '')
                name_short = name_short.replace('Offshore', '')
                name_short = name_short.replace('/', '')
                name_short = name_short.replace('-', '')
                name_short = name_short.replace('?', '')
                name_short = name_short.replace(':', '')
                name_short = name_short.replace('\'', '')
                name_short = name_short.replace('t', 'T')
                name_short = name_short.replace(' ', '')

                # 'operator_name': operator_name,
                # 'status': status,
                # 'start_year': start_year,
                ReturnComponent.append(K_Component.Storages(
                    name=name,
                    id=id,
                    node_id=node_id,
                    country_code=country_code,
                    lat=None,
                    long=None,
                    source_id=source_id,
                    param={'name_short': name_short,
                           'max_cap_store2pipe_GWh_per_d': max_cap_store2pipe_GWh_per_d,
                           'max_cap_pipe2store_GWh_per_d': max_cap_pipe2store_GWh_per_d,
                           'max_workingGas_TWh': max_workingGas_TWh}))
                count = count + 1

            # Reading next line
            temp = M_Helfer.strip_accents(fid.readline()[:-1])

    return ReturnComponent
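# --- Usage sketch (illustrative, not part of the original module) ---------------------
# A minimal sketch of calling the GSE reader above. It assumes the CSV file
# 'GSE_Storage.csv' sits in 'Eingabe/GSE/'; RelDirName must be a pathlib.Path so the '/'
# operator used above works. The helper name _example_read_gse() and the chosen cut-off
# year are hypothetical.
import pathlib

def _example_read_gse():
    RelDirName = pathlib.Path('Eingabe/GSE/')
    # Keep only storages that started operation in or before 2010.
    Storages = read_component(DataType='Storages', requeYear='2010', RelDirName=RelDirName)
    for store in Storages:
        print(store.id, store.param['name_short'], store.param['max_workingGas_TWh'])
    return Storages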