Example No. 1
def read_CSV(CSV_Path):
    """Description:
    ------------
        Reads data from the folder CSV_Path into Grid
        (Grid = instance of the Netz class)
        
    Input Parameter:
    ----------------
        CSV_Path        string containing path name of data location
        
    Return Parameters:
    ------------------
        Grid            instance of class K_Netze.Netz, populated with 
                         data from CSV files"""

    FileList = [
        'BorderPoints', 'PipePoints', 'Compressors', 'Nodes', 'EntryPoints',
        'InterConnectionPoints', 'LNGs', 'Meta_BorderPoints',
        'Meta_Compressors', 'Meta_EntryPoints', 'Meta_InterConnectionPoints',
        'Meta_LNGs', 'Meta_PipePoints', 'Meta_Storages', 'Storages'
    ]

    print('')
    print(CC.Caption + 'Load CSV-Data into Grid' + CC.End)
    print('--------------------------------------')

    Grid = K_Netze.NetComp()
    Grid.Processes.append(K_Netze.Processes('M_CSV.read_CSV'))

    # check which file names are used for import; Meta_* files carry no 'Gas_' prefix
    for key in FileList:
        if 'Meta_' in key:
            filename = key + '.csv'
        else:
            filename = 'Gas_' + key + '.csv'
        CSV_File = os.path.join(CSV_Path, filename)
        Z = CSV_2_list(CSV_File)
        if len(Z) > 0:
            for entry in Z:
                Keys = list(entry.keys())
                Vals = list(entry.values())
                posId = M_FindPos.find_pos_StringInList('id', Keys)
                posName = M_FindPos.find_pos_StringInList('name', Keys)
                del entry['id']
                del entry['name']
                Grid.__dict__[key].append(K_Netze.__dict__[key](
                    id=Vals[posId[0]], name=Vals[posName[0]], param=entry))
        else:
            Grid.__dict__[key] = []

    return Grid
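
A minimal usage sketch; the module path and the CSV directory name are assumptions, not taken from the code above:

# hypothetical call; module layout and directory name are assumptions
from Code import M_CSV
Grid = M_CSV.read_CSV('Ausgabe/CSV/')
print(len(Grid.Nodes), 'nodes loaded from CSV')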
Example No. 2
def create_stats(Graph_MD):
    """ Returning stats values for input **Graph_MD** of type networks. 
	Stats are: 
	- number of graphs, 
	- number of unconnected graphs, 
	- number of nodes per graph, 
	- number of edges per graph, 
	- number of unconnected nodes. 

    \n.. comments:
    Input:
        Graph_MD:           instance of a networkx Graph 
    Return:
        StatsValues         Variables of type NetzKlassen.StatsValue() with values.
    """

    # Initialization of variables
    StatsValues = K_Netze.StatsValue()

    # Creation of variables
    tot_num_nodes = Graph_MD.number_of_nodes()
    num_nodes = 0
    num_edges = 0
    tot_length = 0
    num_graphen = 0  # number of graphs
    num_dg = 0
    num_dg_nodes = 0
    countE = 0
    count = 0
    try:
        if Graph_MD.is_multigraph():
            num_graphen = 2
        else:
            num_graphen = 1
        num_nodes = num_nodes + Graph_MD.number_of_nodes()  # number of nodes
        num_edges = num_edges + Graph_MD.number_of_edges()  # number of edges
        num_dg = num_graphen - 1  # number of disconnected graphs
        tot_length = tot_length + Graph_MD.size(weight='weight')
        countE = countE + 1
        count = count + 1
        num_dg_nodes = tot_num_nodes - num_nodes  # total number of nodes in disconnected graphs
    except:
        print('{}'.format('ERROR: M_Graph.create_stats: in the first segment'))
        raise

    print("num_graphen:  {0}".format(num_graphen))
    print("num_dg:       {0}".format(num_dg))
    print("num_nodes:    {0}".format(num_nodes))
    print("num_edges:    {0}".format(num_edges))
    print("num_dg_nodes: {0}".format(num_dg_nodes))
    print("tot_length:   {0}".format(tot_length))
    StatsValues.num_Graphen = num_graphen
    StatsValues.num_disGraphen = num_dg
    StatsValues.num_Knoten = num_nodes / num_graphen
    StatsValues.num_Kanten = num_edges / num_graphen
    StatsValues.num_disKnoten = num_dg_nodes
    StatsValues.summe_Kanten = tot_length
    StatsValues.durchschnitt_KantenLaenge = tot_length / num_edges

    return StatsValues
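
For reference, a small self-contained sketch of the networkx calls this function builds on (independent of the K_Netze classes):

import networkx as nx

G = nx.Graph()
G.add_edge('A', 'B', weight=2.0)      # edge of length 2.0
G.add_edge('B', 'C', weight=3.5)      # edge of length 3.5
print(G.number_of_nodes())            # 3
print(G.number_of_edges())            # 2
print(G.size(weight='weight'))        # 5.5, i.e. the total edge length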
Example No. 3
def read_DB(currentDB, GridName):
    """
    Description:
    ------------
        Reads Grid with the Name 'Gridname' from Postgres Database 
        and writes to empty Grid (Instance of NetzClass)
    
    Eingabe:
    --------
        currentDB=InfoIO['SQL_3']
        GridName="SciGrid" 
    
    Ausgabe:
    --------
        Netz Class Object
    
    """
    
    conn    = CreateDBConnection(currentDB)
    cur     = conn.cursor()
    Grid    = K_Netze.NetComp()
    
    cur.execute("SELECT table_name FROM information_schema.tables WHERE table_schema='public' ;")
    tableDump = cur.fetchall()
    tableNames = []
    for tab in tableDump:
        tableNames.append(tab[0])
    
    for tablename in Grid.CompLabels():
        if GridName.lower() + "_" + tablename.lower() in tableNames:
            
            tablecommand    = "SELECT * FROM " + GridName.lower() + "_" + tablename.lower() + " ;"
            cur.execute(tablecommand)
            field_names = [i[0] for i in cur.description]
            results         = cur.fetchall()
            if cur.rowcount > 0:
                my_class = my_import('Code.K_Component.' + tablename)

                #Grid.__setattr__(tablename,[my_class({'id':'4', 'name':'qwer'})])

                Grid.__setattr__(tablename, [my_class(**dict(zip(field_names, res))) for res in results])
            else:
                print(CC.Warning + "Warning, " + "Could not read " + tablename + " from table: " + GridName.lower() + "_" + tablename.lower() + CC.End)
               
            # Now setting lat/long to NaN instead of ''
            for ii in range(len(Grid.__dict__[tablename])):
                if len(Grid.__dict__[tablename][ii].__dict__['lat']) == 0:
                    Grid.__dict__[tablename][ii].__dict__['lat'] = float('nan')
                    Grid.__dict__[tablename][ii].__dict__['long'] = float('nan')
                else:
                    Grid.__dict__[tablename][ii].__dict__['lat'] = float(Grid.__dict__[tablename][ii].__dict__['lat'])
                    Grid.__dict__[tablename][ii].__dict__['long'] = float(Grid.__dict__[tablename][ii].__dict__['long'] )
                    
                

    cur.close()
    conn.close()

    return Grid
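
The core step is zipping the cursor's column names with each fetched row to build keyword arguments for the component constructor; a self-contained illustration with made-up values:

field_names = ['id', 'name', 'lat', 'long']        # as taken from cur.description
res = ('N_1', 'Emden', '53.36', '7.21')            # one fetched row (made-up values)
record = dict(zip(field_names, res))
# record == {'id': 'N_1', 'name': 'Emden', 'lat': '53.36', 'long': '7.21'}
# my_class(**record) then maps every column onto the matching constructor argument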
Example No. 4
def read(NumDataSets=1e+100, RelDirName='Eingabe/GasLib/', sourceName=None, RelDirNameInter='Eingabe/InternetDaten/'):
    """ Reading of GasLib data sets from XML files, with **RelDirName** indicating which directory to
    read data from, and **NumDataSets** the maximum number of records to read.

    \n.. comments:
    Input:
        NumDataSets:        max number of data sets to be read in
                            (Default = 1e+100)
        RelDirName:         string, containing dir name where GasLib data is found
                            (Default = 'Eingabe/GasLib/')
        sourceName:         string of source abbreviation
                            (Default = None)
        RelDirNameInter:    not used; only included as an option, as it is an option for other data source read functions
                            (Default = 'Eingabe/InternetDaten/')
    Return:
        Ret_Data:           instance of K_Netze.NetComp class, with components Nodes, PipeSegments, Compressors, and EntryPoints populated."""
    
    # Initialization
    Ret_Data                = K_Netze.NetComp()
#    RelDirName              = Path(RelDirName)
    RelDirName              = os.path.join(os.getcwd(),RelDirName)
    print('Read from: ',RelDirName)
    
    # Reading Raw Data
    Ret_Data.Nodes           = read_component('Nodes',        NumDataSets = NumDataSets, RelDirName = RelDirName, sourceName = sourceName + '.net')
    Ret_Data.PipeSegments    = read_component('PipeSegments', NumDataSets = NumDataSets, RelDirName = RelDirName, sourceName = sourceName + '.net', Nodes = Ret_Data.Nodes)
    Ret_Data.Compressors     = read_component('Compressors',  NumDataSets = NumDataSets, RelDirName = RelDirName, sourceName = sourceName + '.net')
    Ret_Data.EntryPoints     = read_component('EntryPoints',  NumDataSets = NumDataSets, RelDirName = RelDirName, sourceName = sourceName + '.net')
       
    
    # Adding LatLong to all components        
    Ret_Data.add_latLong()


    # Cleaning up node_id and nodes
    Ret_Data.merge_Nodes_Comps(compNames = ['PipeSegments', 'Compressors', 'EntryPoints', 'Nodes'])
    Ret_Data.remove_unUsedNodes()


    # Ensuring that all elements of a component have the same attributes, and 
    # keeping track of origin of data
    Ret_Data.setup_SameAttribs([], None)


    # Adding further essential attributes
    Ret_Data.fill_length('PipeSegments')
    Ret_Data.make_Attrib(['PipeSegments'], 'lat',  'lat_mean',  'mean')
    Ret_Data.make_Attrib(['PipeSegments'], 'long',  'long_mean',  'mean')


    # Adding SourceName
    Ret_Data.SourceName     = ['GasLib']
    
    return Ret_Data
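
A hedged usage sketch; note that the default sourceName=None would fail at sourceName + '.net', so a source name must be supplied (the instance name 'GasLib-582' is an assumption):

# hypothetical call; instance name and directory are assumptions
Netz_GasLib = read(RelDirName='Eingabe/GasLib/', sourceName='GasLib-582')
print(Netz_GasLib.SourceName)    # ['GasLib']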
Example No. 5
def read(NumDataSets=100000, RelDirName='Eingabe/NO/'):
    """ Reading of Norwegian Petroleum Directorate data sets from Shapefiles, with **RelDirName** indicating which directory to 
	read data from, **NumDataSets** maximum number of records to read. 

    \n.. comments: 
    Input:
        NumDataSets:    	max number of data sets to be read in
                            (Default = 100000) 
        RelDirName:     	string, containing dir name where GasLib  data is found
                            (Default = 'Eingabe/NO/')
    Return:
	    Ret_Data:      Instance of K_Netze.NetComp class, with Nodes and Pipesegments populated."""
    # convert the string RelDirName into a Path object
    RelDirName = Path(RelDirName)

    # init object to be returned
    Ret_Data = K_Netze.NetComp()

    # read out all pipelines from shapefile
    Ret_Data.PipeLines = read_component('PipeLines',
                                        NumDataSets,
                                        RelDirName=RelDirName)
    # read out all nodes from shapefile
    Ret_Data.Nodes = read_component('Nodes',
                                    NumDataSets,
                                    RelDirName=RelDirName)

    # Converting from PipeLines to PipeSegments
    Ret_Data.PipeLines2PipeSegments()
    Ret_Data.PipeLines = []

    # merge Nodes and Pipesegments
    Ret_Data.merge_Nodes_Comps(compNames=['PipeSegments', 'Nodes'])

    # remove unused Nodes
    Ret_Data.remove_unUsedNodes()

    # Ensuring that all elements of a component have the same attributes, and
    # keeping track of origin of data
    Ret_Data.setup_SameAttribs([], None)

    # Adding further essential attributes
    Ret_Data.replace_length(compName='PipeSegments')
    Ret_Data.make_Attrib(['PipeSegments'], 'lat', 'lat_mean', 'mean')
    Ret_Data.make_Attrib(['PipeSegments'], 'long', 'long_mean', 'mean')
    Ret_Data.make_Attrib(['Nodes'], '', 'exact', 'const', 1)
    Ret_Data.make_Attrib(['PipeSegments'], '', 'is_H_gas', 'const', 1)
    Ret_Data.SourceName = [C_Code]

    return Ret_Data
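
RelDirName is wrapped in pathlib.Path so it can later be joined with the '/' operator; a small self-contained illustration (the shapefile name is an assumption):

from pathlib import Path

RelDirName = Path('Eingabe/NO/')
shapefile = RelDirName / 'pipelines.shp'   # hypothetical file name
print(str(shapefile))                      # 'Eingabe/NO/pipelines.shp' on POSIX systems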
Example No. 6
def join_PipeLine_Meta(Elemente, Meta_Elemente, Meta_Namen, Meta_Typen,
                       Method_Name):
    """ Function to join elements (**Elemente**) with meta data of elements **Meta_Elemente**.  

    \n.. comments: 
    Input:
        Elemente:            Gas Netzwerk elements (topological information)
        Meta_Elemente:       Information from Meta data for PipeLines 
        Meta_Namen:          Variable names of Meta_Elemente
        Meta_Typen:          List of strings indicating the type of data
        Method_Name:         List of strings, containing indicator if column is to be stored in Param dict
    Return:
        Elemente:            Gas Netzwerk elements linked to the Meta data.
    """

    # Initialization of variables
    Meta_comp_ids = M_Helfer.get_attribFromList(Meta_Elemente, 'comp_id')
    countEle = 0
    posKeep = []
    try:
        for ele in Elemente:
            dieserWert = ele.param['meta_id']
            pos = M_FindPos.find_pos_StringInList(dieserWert, Meta_comp_ids)
            if len(pos) > 0:
                posKeep.append(countEle)
                for idx, metName in enumerate(Meta_Namen):
                    if metName != 'comp_id' and metName != 'id':
                        if len(Method_Name[idx]) > 0:
                            Elemente[countEle].param.update({
                                metName:
                                getattr(Meta_Elemente[pos[0]], metName)
                            })
                        else:
                            setattr(Elemente[countEle], metName,
                                    getattr(Meta_Elemente[pos[0]], metName))

            countEle = countEle + 1

        Temp = K_Netze.NetComp()
        Temp.Temp = Elemente
        Temp.select_byPos('Temp', posKeep)
        Elemente = Temp.Temp

    except:
        print("ERROR: M_Verknuepfe.join_Component_Meta")
        raise

    return Elemente
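
The Method_Name flag decides whether a meta column lands in the element's param dict or becomes a plain attribute; a minimal self-contained sketch of the two branches:

class Element:                       # stand-in for a component element
    def __init__(self):
        self.param = {}

ele, value = Element(), 180.5
ele.param.update({'length_km': value})   # Method_Name entry non-empty -> param dict
setattr(ele, 'length_km', value)         # Method_Name entry empty     -> plain attribute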
Example No. 7
def leseSQL_Punkte(InfoSQL, TabellenName):
    """
    Liest Punkte Tabellen from SQL data base
    
    Eingabe:
        InfoSQL         Strukture von SQL DatenBank Zuganz Daten
        TabellenName    String, von TabellenNamen, via InfoSQL[TabellenName] !!
    Ausgabe:
        Punkte          Liste von Punkten von Klasse NetzKlassen.Nodes()

    """
    Punkte = []
    # Check that the table is in the database
    AlleTabellenNamen = getAllTableNames(InfoSQL)
    Name = InfoSQL['IO'][TabellenName]
    if len(M_FindPos.find_pos_StringInList(Name, AlleTabellenNamen)):
        con = connect(dbname = InfoSQL['IO']["DataBaseName"],  user = InfoSQL['IO']["User"],  host = InfoSQL['IO']["Host"],  port = int(InfoSQL['IO']["Port"]),  password = InfoSQL['IO']["PassWord"])
        cur = con.cursor()
        CurString = "SELECT * FROM " + InfoSQL['IO'][TabellenName]
        cur.execute(CurString)
        TabPunkte = cur.fetchall()
        cur.close()
        con.close()
        count = 0
        for tab in TabPunkte:
            id    = tab[0]
            name  = tab[1]
            lat   = tab[5]
            long  = tab[6]
            land  = tab[4]
            Punkte.append(K_Netze.Nodes(id = id, name = name,  lat = lat, long = long, country_code = land, comment = None, param = {}))
            count = count + 1
    
    
    return Punkte
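
A hedged usage sketch; the InfoSQL structure below only mirrors the keys used above, and all values are assumptions:

# hypothetical call; configuration values are assumptions
InfoSQL = {'IO': {'DataBaseName': 'scigrid', 'User': 'postgres', 'Host': 'localhost',
                  'Port': '5432', 'PassWord': '...', 'TabName_Punkte': 'punkte'}}
Punkte = leseSQL_Punkte(InfoSQL, 'TabName_Punkte')
print(len(Punkte), 'nodes read from the database')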
	
	
Example No. 8
def read(NumDataSets=1e+100,
         requeYear='2000',
         RelDirName='Eingabe/GSE/',
         RelDirNameInter='Eingabe/InternetDaten/'):
    """ Reading in GIE data sets from API, with **DirName** indicating which directory to 
	store data to, **NumDataSets** maximum number of records to read, and **requeYear** for 
	which year to get data. **RelDirName** is relative path name of where CSV files can be found.

    \n.. comments: 
    Input:
        NumDataSets:    	number of data sets
                            (default = 100000) 
		requeYear: 			string containing year [####] for which data to be retrieved
                            (default = '2000') 
        RelDirName:     	string, containing dir name where GIE meta data
                            (default = 'Eingabe/GSE/')
    Return:
	    Ret_Data:      Instance of K_Netze.NetComp class, with components Nodes and Storages populated.
    """

    Ret_Data = K_Netze.NetComp()
    Nodes = []
    Storages = []
    RelDirName = Path(RelDirName)

    # Reading Raw Data
    Storages = read_component('Storages',
                              NumDataSets=NumDataSets,
                              requeYear=requeYear,
                              RelDirName=RelDirName)

    # Generation of additional components
    Nodes = gen_component('Nodes', Storages)  # check this one

    Ret_Data.Nodes = Nodes
    Ret_Data.Storages = Storages

    if RelDirNameInter != None:
        Netz_Internet = K_Netze.NetComp()
        RelDirNameInter = Path(RelDirNameInter)

        Netz_Internet.Nodes = M_Internet.read_component(
            "Nodes", NumDataSets, 0, RelDirName=RelDirNameInter)

        [pos_match_Netz_0, pos_add_Netz_0, pos_match_Netz_1,
         pos_add_Netz_1] = JoinNetz.match(
             Netz_Internet,
             Ret_Data,
             compName='Nodes',
             threshold=80,
             multiSelect=True,
             funcs=lambda comp_0, comp_1: M_Matching.
             getMatch_Names_CountryCode(comp_0, comp_1, AddInWord=100),
             numFuncs=1)

        if len(pos_add_Netz_1) > 0:
            print('WARNING: M_GSE.read(): ' + str(len(pos_add_Netz_1)) +
                  ' from ' + str(len(Ret_Data.Storages)) +
                  ' locations could not be GeoReferenced.')
        else:
            print('Comment: M_GSE.read(): All locations were GeoReferenced.')

        Ret_Data = M_Netze.copy_Vals(Netz_Internet, 'Nodes', 'lat', Ret_Data,
                                     'Nodes', 'lat', pos_match_Netz_0,
                                     pos_match_Netz_1)
        Ret_Data = M_Netze.copy_Vals(Netz_Internet, 'Nodes', 'long', Ret_Data,
                                     'Nodes', 'long', pos_match_Netz_0,
                                     pos_match_Netz_1)
        Ret_Data = M_Netze.copy_ParamVals(Netz_Internet, 'Nodes', 'exact',
                                          Ret_Data, 'Nodes', 'exact',
                                          pos_match_Netz_0, pos_match_Netz_1)

        # Adding lat long to all component elements
        Ret_Data.add_latLong()

    # Unit conversion
    Ret_Data.MoveUnits('Storages',
                       'max_cap_pipe2store_GWh_per_d',
                       'max_cap_pipe2store_M_m3_per_d',
                       replace=True)
    Ret_Data.MoveUnits('Storages',
                       'max_cap_store2pipe_GWh_per_d',
                       'max_cap_store2pipe_M_m3_per_d',
                       replace=True)
    Ret_Data.MoveUnits('Storages',
                       'max_workingGas_TWh',
                       'max_workingGas_M_m3',
                       replace=True)

    # Removing attributes
    Ret_Data.removeAttrib(
        'Nodes', ['name_short', 'operator_name', 'start_year', 'status'])
    Ret_Data.removeAttrib('Nodes', [
        'max_cap_pipe2store_GWh_per_d', 'max_cap_store2pipe_GWh_per_d',
        'max_workingGas_TWh'
    ])
    Ret_Data.removeAttrib('Storages', [
        'max_cap_pipe2store_GWh_per_d', 'max_cap_store2pipe_GWh_per_d',
        'max_workingGas_TWh'
    ])

    # Cleaning up node_id and nodes
    Ret_Data.merge_Nodes_Comps(compNames=['Storages', 'Nodes'])
    Ret_Data.remove_unUsedNodes()

    # Ensuring that all elements of a component have the same attributes, and
    # keeping track of origin of data
    Ret_Data.setup_SameAttribs([], None)

    # Adding SourceName
    Ret_Data.SourceName = ['GSE']

    return Ret_Data
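
A hedged usage sketch; the year and directory names are assumptions:

# hypothetical call; arguments are assumptions
Netz_GSE = read(requeYear='2019', RelDirName='Eingabe/GSE/',
                RelDirNameInter='Eingabe/InternetDaten/')
print(Netz_GSE.SourceName)    # ['GSE']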
Example No. 9
def read_component(DataType='LNGs',
                   NumDataSets=100000,
                   requeYear=[2000],
                   DirName=None):
    """ Reading in GIE LNGs data sets from API, **NumDataSets** maximum number of records to read, 
	and **requeYear** for which year to get data. **RelDirName** is the relative path name.

    \n.. comments: 
    Input:
        DataType:        string, containing the data type to read, otions are 'LNGs' or 'Storages'
        NumDataSets:     (Optional = 100000) number of data sets
		requeYear: 		(Optional = [2000]) list of numbers containing year [####] for which data to be retrieved
        RelDirName:     string, containing relative dir name where GIE meta data
                         default = 'Eingabe/GIE/'
    Return:
	    ReturnComponent	Instance of Component (list of single type elements)
    """

    # dealing with private key
    ReturnComponent = []
    pathPrivKey = os.path.join(os.getcwd(), 'Eingabe/GIE/GIE_PrivateKey.txt')
    if os.path.isfile(pathPrivKey) is False:
        print(
            'ERROR: M_GIE.read_component: you will need to get a private key from the GIE API.'
        )
        print('Please see documentation for help.')
        print('No data will be loaded')
        return ReturnComponent

    PrivKey = M_Helfer.getLineFromFile(pathPrivKey)

    if 'LNGs' in DataType:
        # Initialization
        webCall_1 = 'https://alsi.gie.eu/api/data/'
        eic_code = ''
        count = 0
        filename = str(DirName / 'GIE_LNG.csv')
        print('        LNGs progress:')

        # Reading Meta data from CSV file
        # connecting to CSV file
        fid = open(filename, "r", encoding='iso-8859-15', errors='ignore')
        # Reading header line
        fid.readline()
        # Reading next line
        temp = M_Helfer.strip_accents(fid.readline()[:-1])

        while (len(temp) > 0) and (count < NumDataSets):
            typeval = temp.split(';')[1]
            if 'LSO' not in typeval:
                country_code = temp.split(';')[0]
                id = temp.split(';')[2]
                node_id = [id]
                source_id = [ID_Add + str(id)]
                facility_code = temp.split(';')[2]
                name = temp.split(';')[4]
                name_short = temp.split(';')[5]
                name_short = replaceString(name_short)

                ReturnComponent.append(
                    K_Component.LNGs(name=name,
                                     id=id,
                                     node_id=node_id,
                                     source_id=source_id,
                                     country_code=country_code,
                                     lat=None,
                                     long=None,
                                     param={
                                         'facility_code': facility_code,
                                         'name_short': name_short,
                                         'eic_code': eic_code
                                     }))

                count = count + 1
            else:
                eic_code = temp.split(';')[2]

            # Reading next line
            temp = M_Helfer.strip_accents(fid.readline()[:-1])

        # Creation of a Pool Manager
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                   ca_certs=certifi.where())
        # Reading for all created storages the data off the web page
        #maxSets     = min([len(ReturnComponent), NumDataSets])
        maxSets = len(ReturnComponent)
        #for ii in range(96, 100):
        count = 0
        for ii in range(maxSets):
            # Initialization
            workingLNGVolume = []
            Store2PipeCap = []

            # information from CSV file
            this_facility_code = ReturnComponent[ii].param['facility_code']
            this_country_code = ReturnComponent[ii].country_code
            this_eic_code = ReturnComponent[ii].param['eic_code']
            thisURL = webCall_1 + this_facility_code + '/' + this_country_code + '/' + this_eic_code
            # Get the data
            URLData = http.request('GET', thisURL, headers={'x-key': PrivKey})

            # Convert the data into dict
            tables = []
            try:
                tables = json.loads(URLData.data.decode('UTF-8'))
            except:
                print('ERROR: M_GIE.read_component(LNGs): reading URL failed')
                return []

            # checking that results coming back are ok
            if tables.__contains__('error'):
                print(
                    'GIE load_Storages: something wrong while getting Storage data from GIE'
                )  #, True)
                print(tables)
            # Data allowed to be parsed
            else:
                for tt in tables:
                    # Dissecting the input
                    for year in requeYear:
                        if (tt['dtmi'] != '-') and (str(year)
                                                    in tt['gasDayStartedOn']):
                            workingLNGVolume.append(
                                float(tt['dtmi']) * 1000
                            )  # declared total maximum inventory 1000 m^3 LNG
                            Store2PipeCap.append(
                                float(tt['dtrs'])
                            )  # declared total reference send-out GWh/d (send-out capacity)

                # Remove wrong data points
                workingLNGVolume = M_Helfer.testData(workingLNGVolume,
                                                     'PercentAbsDiff', 4, 0)
                Store2PipeCap = M_Helfer.testData(Store2PipeCap,
                                                  'PercentAbsDiff', 4, 0)

                # Update screen with dot
                print('.', end='')

                # Deriving required values from time series
                ReturnComponent[ii].param.update({
                    'max_workingLNG_M_m3':
                    M_MatLab.get_median(workingLNGVolume)[0] / 1000000
                })
                ReturnComponent[ii].param.update({
                    'median_cap_store2pipe_GWh_per_d':
                    M_MatLab.get_median(Store2PipeCap)[0]
                })
                ReturnComponent[ii].param.update({
                    'max_cap_store2pipe_GWh_per_d':
                    M_MatLab.get_max(Store2PipeCap)[0]
                })

                count = count + 1
                if count > NumDataSets:
                    print(' ')
                    return ReturnComponent

    elif 'Storages' in DataType:
        # Initialization
        webCall_1 = 'https://agsi.gie.eu/api/data/'
        eic_code = ''
        count = 0
        print('         STORAGES progress:')

        filename = str(DirName / 'GIE_Storages.csv')

        # Reading Meta data from CSV file
        # connecting to CSV file
        fid = open(filename,
                   "r",
                   encoding="iso-8859-15",
                   errors="surrogateescape")
        # Reading header line
        fid.readline()
        # Reading next line
        temp = M_Helfer.strip_accents(fid.readline()[:-1])
        while (len(temp) > 0) and (count < NumDataSets):
            typeval = temp.split(';')[1]
            if 'Storage Facility' in typeval:
                country_code = temp.split(';')[0]
                id = temp.split(';')[2]
                node_id = [id]
                source_id = [ID_Add + str(id)]
                facility_code = temp.split(';')[2]
                name = temp.split(';')[4]
                name_short = temp.split(';')[5]
                name_short = replaceString(name_short)

                name_short = name_short.replace(' ', '')
                name_short = name_short.strip()
                if 'OudeStatenzijl' in name_short:
                    country_code = 'NL'
                elif 'KinsaleSouthwest' in name_short:
                    country_code = 'IRL'

                ReturnComponent.append(
                    K_Component.Storages(name=name,
                                         id=id,
                                         node_id=node_id,
                                         lat=None,
                                         long=None,
                                         source_id=source_id,
                                         country_code=country_code,
                                         param={
                                             'facility_code': facility_code,
                                             'eic_code': eic_code,
                                             'name_short': name_short
                                         }))

                count = count + 1
            else:
                eic_code = temp.split(';')[2]

            # Reading next line
            temp = M_Helfer.strip_accents(fid.readline()[:-1])

        # Creation of a Pool Manager
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                   ca_certs=certifi.where())
        # Reading for all created storages the data off the web page
        maxSets = min([len(ReturnComponent), NumDataSets])

        count = 0
        keepPos = []
        for ii in range(maxSets):
            # Initialization
            max_workingGas_M_m3 = []
            Store2PipeCap = []
            Pipe2StoreCap1 = []

            # information from CSV file
            this_facility_code = ReturnComponent[ii].param['facility_code']
            this_country_code = ReturnComponent[ii].country_code
            this_eic_code = ReturnComponent[ii].param['eic_code']
            thisURL = webCall_1 + this_facility_code + '/' + this_country_code + '/' + this_eic_code

            # Get the data
            URLData = http.request('GET', thisURL, headers={'x-key': PrivKey})

            # Convert the data into dict
            tables = []
            try:
                tables = json.loads(URLData.data.decode('UTF-8'))
                # checking that results coming back are ok
                if tables.__contains__('error'):
                    print(
                        'GIE load_Storages: something wrong while getting Storage data from GIE',
                        True)

                # Data allowed to be parsed
                else:
                    # print('len(tables[connectionpoints]) ' + str(len(tables['connectionpoints'])))

                    for tt in tables:
                        # Dissecting the input
                        for year in requeYear:
                            if (tt['gasInStorage'] != '-') and (
                                    str(year) in tt['gasDayStartedOn']):
                                max_workingGas_M_m3.append(
                                    float(tt['workingGasVolume']))
                                Store2PipeCap.append(
                                    float(tt['injectionCapacity']))
                                Pipe2StoreCap1.append(
                                    float(tt['withdrawalCapacity']))

                    # Remove wrong data sets
                    max_workingGas_M_m3 = M_Helfer.testData(
                        max_workingGas_M_m3, 'PercentAbsDiff', 4, 0)
                    Store2PipeCap = M_Helfer.testData(Store2PipeCap,
                                                      'PercentAbsDiff', 4, 0)
                    Pipe2StoreCap = M_Helfer.testData(Pipe2StoreCap1,
                                                      'PercentAbsDiff', 4, 0)

                    # Deriving required values from time series
                    #                    wert, _ =
                    ReturnComponent[ii].param.update({
                        'max_workingGas_M_m3':
                        M_MatLab.get_max(max_workingGas_M_m3)[0]
                    })
                    ReturnComponent[ii].param.update({
                        'max_cap_store2pipe_GWh_per_d':
                        M_MatLab.get_max(Store2PipeCap)[0]
                    })
                    ReturnComponent[ii].param.update({
                        'max_cap_pipe2store_GWh_per_d':
                        M_MatLab.get_max(Pipe2StoreCap)[0]
                    })

                    if math.isnan(ReturnComponent[ii].
                                  param['max_cap_pipe2store_GWh_per_d']):
                        ReturnComponent[ii].param[
                            'max_cap_pipe2store_GWh_per_d'] = None
                    if math.isnan(ReturnComponent[ii].
                                  param['max_cap_store2pipe_GWh_per_d']):
                        ReturnComponent[ii].param[
                            'max_cap_store2pipe_GWh_per_d'] = None
                    if math.isnan(
                            ReturnComponent[ii].param['max_workingGas_M_m3']):
                        ReturnComponent[ii].param['max_workingGas_M_m3'] = None
                    # Update screen with dot
                    print('.', end='')
                    keepPos.append(ii)
                    count = count + 1
                    if count > NumDataSets:
                        # Dealing with bad elements that did not return any URL results
                        tempNetz = K_Netze.NetComp()
                        tempNetz.Storages = ReturnComponent
                        tempNetz.select_byPos('Storages', keepPos)
                        ReturnComponent = tempNetz.Storages
                        print(' ')
                        return ReturnComponent

            except:
                print(
                    'Warning: M_GIE.read_component(Storages): reading URL failed'
                )
                print('  for ', thisURL)

        # Dealing with bad elements that did not return any URL results
        tempNetz = K_Netze.NetComp()
        tempNetz.Storages = ReturnComponent
        tempNetz.select_byPos('Storages', keepPos)
        ReturnComponent = tempNetz.Storages
        print(' ')
    return ReturnComponent
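
The CSV parsing above is a plain ';' split over each line; a self-contained illustration with a made-up record in the assumed column layout (country; type; code; ...; name; short name):

temp = 'DE;Storage Facility;21W000000000123X;x;Rehden;Rehden UGS'   # made-up line
country_code = temp.split(';')[0]    # 'DE'
typeval      = temp.split(';')[1]    # 'Storage Facility'
facility     = temp.split(';')[2]    # '21W000000000123X'
name         = temp.split(';')[4]    # 'Rehden'
name_short   = temp.split(';')[5]    # 'Rehden UGS'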
Example No. 10
def leseSQL_Leitungen(InfoSQL):
    """
    Liest Leitungs Tabelle from SQL data base
    
    Eingabe:
        InfoSQL         Strukture von SQL DatenBank Zuganz Daten
    Ausgabe:
        Leitung         Liste von Leitung der Klasse NetzKlassen.Leitung()
    """
    
    Leitungen   = []
    
    con         = connect(dbname = InfoSQL['IO']["DataBaseName"],  user = InfoSQL['IO']["User"],  host = InfoSQL['IO']["Host"],  port = int(InfoSQL['IO']["Port"]),  password = InfoSQL['IO']["PassWord"])
    cur         = con.cursor()
    CurString   = "SELECT * FROM " + InfoSQL['IO']["TabName_Leitungen"]
    cur.execute(CurString)
    
    Leitungen = cur.fetchall()

    cur.close()
    con.close()
    
    # Initialization of variables
    countLeitung    = 0
    countLine       = 0
    MaxNum          = len(Leitungen)
    Leitung         = []
    
    AlleAlleName = []
    for ii in range(MaxNum):
         AlleAlleName.append(Leitungen[ii][2])
    
    #     
    while countLine < MaxNum:
        Leitung.append(K_Netze.Leitung())
        dieserLeitungsName                  = Leitungen[countLine][2]           # pipeline name
        dieserPunktName                     = Leitungen[countLine][3]           # node name
        Leitung[countLeitung].name          = dieserLeitungsName
        Leitung[countLeitung].node_id       = [dieserPunktName]                 # node names
        Leitung[countLeitung].param['description']   = Leitungen[countLine][6]
        
        #Leitung[countLeitung].__dict__
        # dir(Leitung[countLeitung])
        
        # Build the remaining list of pipeline names
        allLeitungsNames                = AlleAlleName[countLine+1:]
        pos = M_FindPos.find_pos_StringInList(dieserLeitungsName, allLeitungsNames)
        if len(pos) == 1:
            dieserPunktName                 = Leitungen[countLine + 1 + pos[0]][3]
            Leitung[countLeitung].node_id.append(dieserPunktName)
        elif len(pos) > 1:
            dieserPunktName                 = Leitungen[countLine + 1+ pos[len(pos) - 1]][3]
            pos                             = pos[0:len(pos)-1]
            for p in pos:
                Leitung[countLeitung].node_id.append(Leitungen[countLine + 1 + p][3])
            Leitung[countLeitung].node_id.append(dieserPunktName)
            pos.append(0)
        else:
            print('Pipeline defective')
        
        
        countLeitung    = countLeitung  + 1
        countLine       = countLine     + 1 + len(pos)

    return Leitung
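
The while loop groups consecutive rows that share a pipeline name; the same lookup in plain Python instead of M_FindPos, for illustration:

AlleAlleName = ['L1', 'L1', 'L2', 'L2', 'L2']   # pipeline name of each row (made-up)
countLine = 0
dieserLeitungsName = AlleAlleName[countLine]
rest = AlleAlleName[countLine + 1:]
pos = [i for i, n in enumerate(rest) if n == dieserLeitungsName]
# pos == [0]; the next pipeline then starts at countLine + 1 + len(pos)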
Example No. 11
def read(NumDataSets=100000,
         requeYear='2000',
         RelDirName='Eingabe/GIE/',
         RelDirNameInter='Eingabe/InternetDaten/'):
    """ Reading in GIE data sets from API, with **RelDirName** indicating which directory 
	from where to load the data from, **NumDataSets** maximum number of records to read, 
	and **requeYear** for which year to get data.

    \n.. comments: 
    Input:
        NumDataSets:     	number of data sets
                            (default = 100000) 
		requeYear: 			string containing year [####] for which data to be retrieved
                            (default = '2000') 
        RelDirName:     	string, of relative directory name where GIE meta data is loaded from
		RelDirNameInter: 	String of location of internet data so that Noders can be loaded from that and GIE stations
                            been given letlong values from Internet data set.
							(default ='Eingabe/InternetDaten/')
    Return:
	    Ret_Data: 			Instance of Netze class."""

    RelDirName = Path(RelDirName)
    Ret_Data = K_Netze.NetComp()
    LNGs = []
    Storages = []

    # Reading Raw Data
    Storages = read_component('Storages',
                              NumDataSets,
                              requeYear,
                              DirName=RelDirName)
    LNGs = read_component('LNGs', NumDataSets, requeYear, DirName=RelDirName)

    # Generation of additional components
    Nodes1 = gen_component('Nodes', LNGs)  # check this one
    Nodes2 = gen_component('Nodes', Storages)  # check this one

    Ret_Data.Nodes = Nodes1 + Nodes2
    Ret_Data.LNGs = LNGs
    Ret_Data.Storages = Storages

    Ret_Data.MoveUnits('LNGs', 'max_workingLNG_M_m3', 'max_workingGas_M_m3')

    # Adding lat long if Netz_Internet supplied
    if RelDirNameInter != None:
        RelDirNameInter = Path(RelDirNameInter)
        Netz_Internet = K_Netze.NetComp()
        Netz_Internet.Nodes = M_Internet.read_component(
            "Nodes", NumDataSets, 0, RelDirName=RelDirNameInter)

        [pos_match_Netz_0, pos_add_Netz_0, pos_match_Netz_1,
         pos_add_Netz_1] = JoinNetz.match(
             Netz_Internet,
             Ret_Data,
             compName='Nodes',
             threshold=80,
             multiSelect=True,
             funcs=lambda comp_0, comp_1: M_Matching.
             getMatch_Names_CountryCode(comp_0, comp_1, AddInWord=100),
             numFuncs=1)

        if len(pos_add_Netz_1) > 0:
            print('WARNING: M_GIE.read(): ' + str(len(pos_add_Netz_1)) +
                  ' from ' + str(len(Ret_Data.Storages)) +
                  ' locations could not be GeoReferenced.')

        Ret_Data = M_Netze.copy_Vals(Netz_Internet, 'Nodes', 'lat', Ret_Data,
                                     'Nodes', 'lat', pos_match_Netz_0,
                                     pos_match_Netz_1)
        Ret_Data = M_Netze.copy_Vals(Netz_Internet, 'Nodes', 'long', Ret_Data,
                                     'Nodes', 'long', pos_match_Netz_0,
                                     pos_match_Netz_1)
        Ret_Data = M_Netze.copy_ParamVals(Netz_Internet, 'Nodes', 'exact',
                                          Ret_Data, 'Nodes', 'exact',
                                          pos_match_Netz_0, pos_match_Netz_1)

        Ret_Data.add_latLong()

    # Unit conversion
    Ret_Data.MoveUnits('LNGs',
                       'max_cap_store2pipe_GWh_per_d',
                       'max_cap_store2pipe_M_m3_per_d',
                       replace=True)
    Ret_Data.MoveUnits('LNGs',
                       'median_cap_store2pipe_GWh_per_d',
                       'median_cap_store2pipe_M_m3_per_d',
                       replace=True)
    Ret_Data.MoveUnits('LNGs',
                       'max_workingLNG_M_m3',
                       'max_workingGas_M_m3',
                       replace=True)
    Ret_Data.MoveUnits('Storages',
                       'max_cap_pipe2store_GWh_per_d',
                       'max_cap_pipe2store_M_m3_per_d',
                       replace=True)
    Ret_Data.MoveUnits('Storages',
                       'max_cap_store2pipe_GWh_per_d',
                       'max_cap_store2pipe_M_m3_per_d',
                       replace=True)

    # Removing attributes
    Ret_Data.removeAttrib('LNGs', [
        'median_cap_store2pipe_GWh_per_d', 'max_cap_store2pipe_GWh_per_d',
        'max_workingLNG_M_m3'
    ])
    Ret_Data.removeAttrib(
        'Storages',
        ['max_cap_pipe2store_GWh_per_d', 'max_cap_store2pipe_GWh_per_d'])

    # Cleaning up node_id and nodes
    Ret_Data.merge_Nodes_Comps(compNames=['LNGs', 'Storages', 'Nodes'])
    Ret_Data.remove_unUsedNodes()

    # Ensuring that all elements of a component have the same attributes, and
    # keeping track of origin of data
    Ret_Data.setup_SameAttribs([], None)

    # Adding SourceName
    Ret_Data.SourceName = ['GIE']

    return Ret_Data
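
A hedged usage sketch; the year and directories are assumptions, and a GIE API key file is required (see read_component above):

# hypothetical call; arguments are assumptions
Netz_GIE = read(requeYear='2019', RelDirName='Eingabe/GIE/',
                RelDirNameInter='Eingabe/InternetDaten/')
print(len(Netz_GIE.Storages), 'storages,', len(Netz_GIE.LNGs), 'LNG terminals')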
Example No. 12
def read(NumDataSets = 100000, RelDirName  = 'Eingabe/LKD/'):
    """ Main function to load LKD data set from shape file. Relative location of data given through **RelDirName**.
    """
    Ret_Data                = K_Netze.NetComp()
    Nodes                   = []
    PipeSegments            = []
    Storages                = []
    Compressors             = []
    Productions             = []
    
    RelDirName              = Path(RelDirName)

    
    # reading of raw data components
    PipeSegments    = read_component('PipeSegments', NumDataSets, RelDirName = RelDirName)

    # fix for Inf in PipeSegment
    for pp in PipeSegments:
        for xx in range(len(pp.lat)):
            if str(pp.lat[xx]) == 'inf':
                pp.lat[xx]  = 52.3
                pp.long[xx] = 5.4


    Nodes           = read_component('Nodes', NumDataSets, RelDirName = RelDirName)
    Ret_Data.Nodes  = Nodes
    
    # fixes for inf in data
    for nn in Nodes:
        if str(nn.lat) == 'inf':
            nn.lat  = 52.3
            nn.long = 5.4
            
            
    
    # Fixing country code problems
    Ret_Data        = changeCountryCode(Ret_Data, RelDirName  = 'Eingabe/LKD/LKD_CountryCodeChanges.csv')
    Nodes           = Ret_Data.Nodes
    
    
    Productions     = read_component('Productions', NumDataSets, RelDirName = RelDirName)
        
    Storages        = read_component('Storages', NumDataSets, RelDirName = RelDirName)
        


    # Fixing of PipeSegments and Nodes differences
    PipeSegments = fixPipeSegmentsNode(PipeSegments, Nodes)

    # Fixing some of the LKD pipe Segments
    Ret_Data.PipeSegments   = PipeSegments
    Ret_Data.Nodes          = Nodes
    PipeSegments            = changePipeSegments(Ret_Data, RelDirName  = 'Eingabe/LKD/LKD_NodeChanges.csv')

    Ret_Data.moveAttriVal(sourceCompName = 'Nodes', destinCompName = 'PipeSegments', 
                          sourceFindAttribName = 'id', destinFindAttribName = 'node_id', 
                          sourceAttribName = 'country_code', destinAttribName = 'country_code')

    Ret_Data.Nodes          = M_Shape.reduceElement(Ret_Data.Nodes, reduceType = 'LatLong')
    
    # Generation of other components
    Compressors             = gen_component('Compressors', Ret_Data.Nodes)
   
    Ret_Data.Storages       = Storages
    Ret_Data.Compressors    = Compressors
    Ret_Data.Productions    = Productions
    Ret_Data.PipeLines      = []
    
    # reduction of PipeSegments from 1809 -->  1261 
    Ret_Data.PipeSegments2PipeSegments(attribListNames = ['max_pressure_bar', 'gas_type_isH', 'diameter_mm', 'pipe_class_type'], exceptNodes = ['Haidach', 'N_805129'])
    
    # Adding lat long
    Ret_Data.add_latLong(CompNames = ['Storages', 'Compressors', 'Productions', 'PipeSegments'])

    # Cleaning up node_id and nodes
    Ret_Data.merge_Nodes_Comps(compNames = ['Storages', 'Compressors', 'Productions', 'PipeSegments', 'Nodes'])
    Ret_Data.remove_unUsedNodes()

    # Unit Conversion
    Ret_Data.MoveUnits('Storages',     'max_cap_pipe2store_GWh_per_d', 'max_cap_pipe2store_M_m3_per_d', replace = True)
    Ret_Data.MoveUnits('Storages',     'max_cap_store2pipe_GWh_per_d', 'max_cap_store2pipe_M_m3_per_d', replace = True)
    Ret_Data.MoveUnits('PipeSegments', 'max_cap_GWh_per_d',            'max_cap_M_m3_per_d',            replace = True)
    Ret_Data.MoveUnits('Productions',  'max_production_GWh_per_d',     'max_production_M_m3_per_d',     replace = True)
    
    # removing attributes
    Ret_Data.removeAttrib('PipeSegments', ['max_cap_GWh_per_d'])
    Ret_Data.removeAttrib('Storages',     ['max_cap_pipe2store_GWh_per_d', 'max_cap_store2pipe_GWh_per_d'])
    Ret_Data.removeAttrib('Nodes',        ['compressor', 'ugs', 'production', 'comp_units'])
    Ret_Data.removeAttrib('Productions',  ['max_production_GWh_per_d'])

    
    # Ensuring that all elements of a component have the same attributes, and 
    # keeping track of origin of data
    Ret_Data.setup_SameAttribs([], None)

    # Adding further essential attributes
    Ret_Data.fill_length('PipeSegments')
    Ret_Data.make_Attrib(['PipeSegments'], 'lat',  'lat_mean',  'mean')
    Ret_Data.make_Attrib(['PipeSegments'], 'long', 'long_mean', 'mean')
    
    
    # Adding SourceName
    Ret_Data.SourceName      = ['LKD']
    
    
    
    return Ret_Data
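
The 'inf' repair above compares the string form of the coordinate; an equivalent, self-contained check with math.isinf, using the same fallback coordinates:

import math

lat, lon = float('inf'), float('inf')
if math.isinf(lat):          # same effect as str(lat) == 'inf'
    lat, lon = 52.3, 5.4     # fallback coordinates used above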
Example No. 13
def join_Component_Meta(Elemente, Meta_Elemente, Meta_Namen, Meta_Typen,
                        Method_Name):
    """ Function to join elements (**Elemente**) with meta data of elements **Meta_Elemente**.  

    \n.. comments: 
    Input:
        Elemente:            Gas Netzwerk elements (topological information)
        Meta_Elemente:       Information of Meta data por pipelines
        Meta_Typen:          Variable type of the different Meta_Elemente (e.g. text, real)
        Meta_Namen:          Variabele namen of the Meta_Elemente
        Meta_Typen:          List of strings indicating the type of data.
    Return:
        Elemente:            Gas Netzwerk elements, linked with  Meta daten.
    """

    # Initialization of variables
    Meta_comp_ids = M_Helfer.get_attribFromList(Meta_Elemente, 'comp_id')
    countEle = 0
    posKeep = []
    posMeta = []
    try:
        for ele in Elemente:
            countMet = 0
            dieserWert = ele.id
            diserNodeID = ele.node_id

            pos = M_FindPos.find_pos_StringInList(dieserWert, Meta_comp_ids)

            if len(pos) > 0:
                posMeta.append(pos[0])
                posKeep.append(countEle)

                for idx, metName in enumerate(Meta_Namen):
                    if metName != 'comp_id' and metName != 'id':
                        # Check if param
                        if len(Method_Name[idx]) > 0:
                            Elemente[countEle].param.update(
                                {
                                    metName:
                                    getattr(Meta_Elemente[pos[0]], metName)
                                }
                            )  # setattr(Elemente[countEle], metName, getattr(Meta_Elemente[pos[0]], metName))
                            if getattr(
                                    Meta_Elemente[pos[0]], metName
                            ) == None:  # getattr(Meta_Elemente[pos[0]], metName) == None:
                                Elemente[countEle].param.update(
                                    {metName: None}
                                )  # setattr(Elemente[countEle], metName, None)

                        # Non param
                        else:
                            setattr(Elemente[countEle], metName,
                                    getattr(Meta_Elemente[pos[0]], metName))
                            if getattr(Meta_Elemente[pos[0]], metName) == None:
                                setattr(Elemente[countEle], metName, None)

                Elemente[countEle].node_id = diserNodeID
                Elemente[countEle].id = dieserWert

                countMet = countMet + 1

            countEle = countEle + 1

        Temp = K_Netze.NetComp()
        Temp.Temp = Elemente
        Temp.select_byPos('Temp', posKeep)
        Elemente = Temp.Temp

    except:
        print("ERROR: M_Verknuepfe.join_Component_Meta")
        raise

    return Elemente
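
Elements without a meta match are dropped at the end via the posKeep index list; the same filtering in plain Python, for illustration:

Elemente = ['e0', 'e1', 'e2', 'e3']   # stand-ins for component elements
posKeep  = [0, 2]                     # indices that found a meta record
Elemente = [Elemente[p] for p in posKeep]
# Elemente == ['e0', 'e2']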
Example No. 14
def read(NumDataSets=100000,
         requeYear='',
         licenseType='',
         GasType='H',
         RelDirName='Eingabe/InternetDaten/'):
    """ Reading in Internet data sets from Internet specific CSV file, with  
    **NumDataSets** maximum number of records to read, and **requeYear** for which year to get data.

    \n.. comments: 
    Input:
        NumDataSets:    (Optional = 100000) number of data sets
        requeYear:      (Optional = '2010') string containing year [####] for which data is to be retrieved
        licenseType:    (Optional = ''), string containing the kind of license that the data will be selected on
        GasType:        (Optional = 'H') a character indicating either H or L gas.
        RelDirName:     string, containing the relatie dir name where the Internet data can be loaded from.
    Return:
        Ret_Data:       Element of K_Netze.NetComp() class, being the SciGRID_gas component data set class 
    """

    Filter = {"year": requeYear, "license": licenseType, "GasType": GasType}
    Ret_Data = K_Netze.NetComp()
    MD = K_Component.MetaData()
    RelDirName = Path(RelDirName)

    # Reading Raw Data
    Ret_Data.Nodes = read_component("Nodes",
                                    NumDataSets,
                                    0,
                                    RelDirName=RelDirName)
    Ret_Data.BorderPoints = read_component("BorderPoints",
                                           NumDataSets,
                                           0,
                                           RelDirName=RelDirName)
    Ret_Data.Compressors = read_component("Compressors",
                                          NumDataSets,
                                          0,
                                          RelDirName=RelDirName)
    #Ret_Data.Consumers               = read_component("Consumers",      NumDataSets, 0, RelDirName = RelDirName)
    Ret_Data.EntryPoints = read_component("EntryPoints",
                                          NumDataSets,
                                          0,
                                          RelDirName=RelDirName)
    Ret_Data.InterConnectionPoints = read_component("InterConnectionPoints",
                                                    NumDataSets,
                                                    0,
                                                    RelDirName=RelDirName)
    Ret_Data.LNGs = read_component("LNGs",
                                   NumDataSets,
                                   0,
                                   RelDirName=RelDirName)
    Ret_Data.Storages = read_component("Storages",
                                       NumDataSets,
                                       0,
                                       RelDirName=RelDirName)

    Ret_Data.PipeLines = read_PipeLines(NumDataSets, RelDirName=RelDirName)

    # Meta Data
    [
        MD.BorderPoints_Meta, MD.BorderPoints_Meta_Type,
        MD.BorderPoints_Meta_Name, MD.BorderPoints_methodName
    ] = read_Meta("BorderPoints", RelDirName=RelDirName)
    [
        MD.Compressors_Meta, MD.Compressors_Meta_Type,
        MD.Compressors_Meta_Name, MD.Compressors_methodName
    ] = read_Meta("Compressors", RelDirName=RelDirName)
    [
        MD.EntryPoints_Meta, MD.EntryPoints_Type, MD.EntryPoints_Meta_Name,
        MD.EntryPoints_methodName
    ] = read_Meta("EntryPoints", RelDirName=RelDirName)
    [MD.LNGs_Meta, MD.LNGs_Meta_Type, MD.LNGs_Meta_Name,
     MD.LNGs_methodName] = read_Meta("LNGs", RelDirName=RelDirName)
    [
        MD.PipeLines_Meta, MD.PipeLines_Meta_Type, MD.PipeLines_Meta_Name,
        MD.PipePoints_methodName
    ] = read_Meta("PipePoints", RelDirName=RelDirName)
    [
        MD.Storages_Meta, MD.Storages_Meta_Type, MD.Storages_Meta_Name,
        MD.Storages_methodName
    ] = read_Meta("Storages", RelDirName=RelDirName)
    [
        MD.InterConnectionPoints_Meta, MD.InterConnectionPoints_Meta_Type,
        MD.InterConnectionPoints_Meta_Name, MD.InterConnectionPoints_methodName
    ] = read_Meta("InterConnectionPoints", RelDirName=RelDirName)

    # Filter of Data
    MD.BorderPoints_Meta = M_Filter.filter_Daten(Filter, MD.BorderPoints_Meta)
    MD.Compressors_Meta = M_Filter.filter_Daten(Filter, MD.Compressors_Meta)
    MD.EntryPoints_Meta = M_Filter.filter_Daten(Filter, MD.EntryPoints_Meta)
    MD.InterConnectionPoints_Meta = M_Filter.filter_Daten(
        Filter, MD.InterConnectionPoints_Meta)
    MD.LNGs_Meta = M_Filter.filter_Daten(Filter, MD.LNGs_Meta)
    MD.PipeLines_Meta = M_Filter.filter_Daten(Filter, MD.PipeLines_Meta)
    MD.Storages_Meta = M_Filter.filter_Daten(Filter, MD.Storages_Meta)

    # Part of joining elements.
    Ret_Data.BorderPoints = join_Component_Meta(Ret_Data.BorderPoints,
                                                MD.BorderPoints_Meta,
                                                MD.BorderPoints_Meta_Name,
                                                MD.BorderPoints_Meta_Type,
                                                MD.BorderPoints_methodName)
    Ret_Data.Compressors = join_Component_Meta(Ret_Data.Compressors,
                                               MD.Compressors_Meta,
                                               MD.Compressors_Meta_Name,
                                               MD.Compressors_Meta_Type,
                                               MD.Compressors_methodName)
    Ret_Data.EntryPoints = join_Component_Meta(Ret_Data.EntryPoints,
                                               MD.EntryPoints_Meta,
                                               MD.EntryPoints_Meta_Name,
                                               MD.EntryPoints_Type,
                                               MD.EntryPoints_methodName)
    Ret_Data.InterConnectionPoints = join_Component_Meta(
        Ret_Data.InterConnectionPoints, MD.InterConnectionPoints_Meta,
        MD.InterConnectionPoints_Meta_Name, MD.InterConnectionPoints_Meta_Type,
        MD.InterConnectionPoints_methodName)
    Ret_Data.LNGs = join_Component_Meta(Ret_Data.LNGs, MD.LNGs_Meta,
                                        MD.LNGs_Meta_Name, MD.LNGs_Meta_Type,
                                        MD.LNGs_methodName)
    Ret_Data.Storages = join_Component_Meta(Ret_Data.Storages,
                                            MD.Storages_Meta,
                                            MD.Storages_Meta_Name,
                                            MD.Storages_Meta_Type,
                                            MD.Storages_methodName)

    Ret_Data.PipeLines = join_PipeLine_Meta(Ret_Data.PipeLines,
                                            MD.PipeLines_Meta,
                                            MD.PipeLines_Meta_Name,
                                            MD.PipeLines_Meta_Type,
                                            MD.PipePoints_methodName)

    # Creation of PipeSegments and PipePoints
    Ret_Data.PipeLines2PipeSegments()
    Ret_Data.PipeSegments2PipePoints()

    # Unit conversion
    Ret_Data.MoveUnits('LNGs',
                       'storage_LNG_Mt',
                       'max_workingGas_M_m3',
                       replace=True)
    Ret_Data.MoveUnits('LNGs',
                       'max_cap_store2pipe_M_m3_per_a',
                       'max_cap_store2pipe_M_m3_per_d',
                       replace=True)
    Ret_Data.MoveUnits('Compressors',
                       'max_cap_M_m3_per_h',
                       'max_cap_M_m3_per_d',
                       replace=True)
    Ret_Data.MoveUnits('Storages',
                       'max_cap_pipe2store_GWh_per_d',
                       'max_cap_pipe2store_M_m3_per_d',
                       replace=True)
    Ret_Data.MoveUnits('Storages',
                       'max_cap_store2pipe_GWh_per_d',
                       'max_cap_store2pipe_M_m3_per_d',
                       replace=True)
    Ret_Data.MoveUnits('Storages',
                       'max_workingGas_TWh',
                       'max_workingGas_M_m3',
                       replace=True)

    # Removing attributes
    Ret_Data.removeAttrib('PipeSegments', ['meta_id'])
    Ret_Data.removeAttrib('LNGs',
                          ['storage_LNG_Mt', 'max_cap_store2pipe_M_m3_per_a'])
    Ret_Data.removeAttrib('Compressors', ['max_cap_M_m3_per_h'])
    Ret_Data.removeAttrib('Storages', [
        'max_cap_pipe2store_GWh_per_d', 'max_cap_store2pipe_GWh_per_d',
        'max_workingGas_TWh'
    ])

    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Leeres Gas Feld',
                              'Depleted Field')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Depleted gas field',
                              'Depleted Field')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Leeres Öl Feld',
                              'Depleted Field')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Leeres Öl/Gas Feld',
                              'Depleted Field')
    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Leeres Feld',
                              'Depleted Field')

    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Salz Kaverne',
                              'Salt cavern')

    Ret_Data.replaceAttribVal('Storages', 'store_type', 'Stein Kaverne',
                              'Rock Cavern')
    Ret_Data.replaceAttribVal('Storages', 'store_type',
                              'Leeres Öl Feld mit Gas Haube', 'Depleted Field')

    # Adding lat long
    Ret_Data.add_latLong()

    # Removing unwanted components
    Ret_Data.PipeLines = []
    Ret_Data.PipePoints = []

    # Ensuring that all elements of a component have the same attributes, and
    # keeping track of the origin of the data
    Ret_Data.setup_SameAttribs([], None)

    # Adding further essential attributes
    Ret_Data.fill_length('PipeSegments')
    Ret_Data.make_Attrib(['PipeSegments'], 'lat', 'lat_mean', 'mean')
    Ret_Data.make_Attrib(['PipeSegments'], 'long', 'long_mean', 'mean')

    # Replacing any '' with None, and the strings 'True'/'False' with 1/0
    Ret_Data.replace_attrib(compNames=[],
                            attribNames=[],
                            attribValIn='',
                            attribValOut=None)
    Ret_Data.replace_attrib(compNames=[],
                            attribNames=[],
                            attribValIn='True',
                            attribValOut=1)
    Ret_Data.replace_attrib(compNames=[],
                            attribNames=[],
                            attribValIn='False',
                            attribValOut=0)

    # Cleaning up node_id and nodes
    Ret_Data.merge_Nodes_Comps(compNames=[
        'LNGs', 'Compressors', 'Storages', 'PipeSegments', 'EntryPoints',
        'InterConnectionPoints', 'BorderPoints', 'Nodes'
    ])
    Ret_Data.remove_unUsedNodes()

    Ret_Data.SourceName = ['InterNet']

    return Ret_Data
Exemplo n.º 15
0
def read(NumDataSets=100000,
         requeYear='2000',
         RelDirName='',
         RelDirNameInter='Eingabe/InternetDaten/'):
    """ Reading in GIE data sets from API, with **RelDirName** indicating which directory from where to 
    load the data, **NumDataSets** the maximum number of records to read, and **requeYear** the
    year for which data is to be retrieved.

    \n.. comments:
    Input:
        NumDataSets:        maximum number of data sets to read
                            (default = 100000)
        requeYear:          string containing the year [####] for which data is to be retrieved
                            (default = '2000')
        RelDirName:         string, relative directory name from which the GIE meta data is loaded
        RelDirNameInter:    string, relative directory name of the InterNet data used for geo-referencing
                            (default = 'Eingabe/InternetDaten/')
    Return:
        Ret_Data:           data structure (Netz class instance) of components
    """

    Ret_Data = K_Netze.NetComp()
    Storages = []

    # Reading Raw Data
    Storages = read_component('Storages', NumDataSets, requeYear)

    # Generation of additional components
    Nodes = gen_component('Nodes', Storages)  # check this one

    # Assigning data to output Struct
    Ret_Data.Nodes = Nodes
    Ret_Data.Storages = Storages

    # Adding lat long if Netz_Internet supplied
    if len(RelDirNameInter) > 0:
        RelDirNameInter = Path(RelDirNameInter)
        Netz_Internet = K_Netze.NetComp()
        Netz_Internet.Nodes = M_Internet.read_component(
            "Nodes", 1e+100, 0, RelDirName=RelDirNameInter)

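        # Fuzzy-match the nodes of this data set against the InterNet nodes by
        # name and country code; candidate pairs scoring at least the
        # threshold of 75 (out of 100) are treated as the same location, and
        # any unmatched nodes are reported below.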
        [pos_match_Netz_0, pos_add_Netz_0, pos_match_Netz_1,
         pos_add_Netz_1] = JoinNetz.match(
             Netz_Internet,
             Ret_Data,
             compName='Nodes',
             threshold=75,
             multiSelect=True,
             funcs=lambda comp_0, comp_1: M_Matching.
             getMatch_Names_CountryCode(comp_0, comp_1, AddInWord=100),
             numFuncs=1)

        if len(pos_add_Netz_1) > 0:
            print('WARNING: M_IGU.read(): ' + str(len(pos_add_Netz_1)) +
                  ' of ' + str(len(Ret_Data.Storages)) +
                  ' locations could not be GeoReferenced.')
        else:
            print('Comment: M_IGU.read(): All locations were GeoReferenced.')

        Ret_Data = M_Netze.copy_Vals(Netz_Internet, 'Nodes', 'lat', Ret_Data,
                                     'Nodes', 'lat', pos_match_Netz_0,
                                     pos_match_Netz_1)
        Ret_Data = M_Netze.copy_Vals(Netz_Internet, 'Nodes', 'long', Ret_Data,
                                     'Nodes', 'long', pos_match_Netz_0,
                                     pos_match_Netz_1)
        Ret_Data = M_Netze.copy_ParamVals(Netz_Internet, 'Nodes', 'exact',
                                          Ret_Data, 'Nodes', 'exact',
                                          pos_match_Netz_0, pos_match_Netz_1)

        Ret_Data.add_latLong()

    # Cleaning up node_id and nodes
    Ret_Data.merge_Nodes_Comps(compNames=['Storages', 'Nodes'])
    Ret_Data.remove_unUsedNodes()

    # Ensuring that all elements of a component have the same attributes, and
    # keeping track of the origin of the data
    Ret_Data.setup_SameAttribs([], None)

    # Adding SourceName
    Ret_Data.SourceName = ['IGU']

    return Ret_Data
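# A minimal usage sketch for the reader above (not part of the original
# module): the directory names are placeholders, and it assumes the GIE data
# and the InterNet node CSVs are available under the given relative paths.
if __name__ == '__main__':
    Netz_IGU = read(NumDataSets=1000,
                    requeYear='2017',
                    RelDirName='Eingabe/GIE/',
                    RelDirNameInter='Eingabe/InternetDaten/')
    print(len(Netz_IGU.Storages), 'storages and', len(Netz_IGU.Nodes),
          'nodes read')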
Exemplo n.º 16
0
def read(RelDirName='Eingabe/CSV/', NumDataSets=1e+100, skiprows=[]):
    """Description:
    ------------
        Reads data from the folder RelDirName into Grid
        Grid = instance of Netz class

    Input Parameter:
    ----------------
        RelDirName    string containing the path of the data location [default: 'Eingabe/CSV/']
        NumDataSets   maximum number of elements to be read for each component [default: 1e+100]
        skiprows      list of rows to skip while reading the CSV files [default: []]
    Return Parameters:
    ------------------
        Grid          instance of class K_Netze.Netz, populated with
                      data from CSV files"""
    # Resolve the data directory relative to the current working directory
    DirName = Path.cwd() / RelDirName

    Grid = K_Netze.NetComp()
    FileList = K_Netze.NetComp().CompLabels()
    for key in FileList:
        count = 0
        filename = 'Gas_' + key + '.csv'
        CSV_File = str(DirName / filename)

        # Z is an empty list if the file does not exist
        Z = CSV_2_list(CSV_File, skiprows=skiprows)
        if len(Z) > 0:
            for entry in Z:
                Keys = list(entry.keys())
                Vals = list(entry.values())
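                # Normalise the cell values: the literal string 'None' becomes
                # None, NaN floats become None, and numeric strings are cast
                # to float; all other values are kept unchanged.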
                for ii in range(len(Vals)):
                    if Vals[ii] == 'None':
                        Vals[ii] = None
                    elif type(Vals[ii]) is float:
                        if math.isnan(Vals[ii]):
                            Vals[ii] = None
                    else:
                        try:
                            Vals[ii] = float(Vals[ii])
                        except (ValueError, TypeError):
                            # non-numeric values are kept as they are
                            pass

                pos_Id = M_FindPos.find_pos_StringInList('id', Keys)
                pos_Name = M_FindPos.find_pos_StringInList('name', Keys)
                pos_SId = M_FindPos.find_pos_StringInList('source_id', Keys)
                pos_Node = M_FindPos.find_pos_StringInList('node_id', Keys)
                pos_CC = M_FindPos.find_pos_StringInList('country_code', Keys)
                pos_lat = M_FindPos.find_pos_StringInList('lat', Keys)
                pos_long = M_FindPos.find_pos_StringInList('long', Keys)
                pos_comm = M_FindPos.find_pos_StringInList('comment', Keys)
                pos_para = M_FindPos.find_pos_StringInList('param', Keys)
                pos_meth = M_FindPos.find_pos_StringInList('method', Keys)
                pos_unce = M_FindPos.find_pos_StringInList('uncertainty', Keys)
                pos_tags = M_FindPos.find_pos_StringInList('tags', Keys)

                del entry['id']
                del entry['name']
                del entry['source_id']
                del entry['node_id']
                del entry['country_code']

                del entry['lat']
                del entry['long']
                del entry['comment']
                del entry['param']
                del entry['method']
                del entry['uncertainty']
                del entry['tags']

                id = Vals[pos_Id[0]]
                name = Vals[pos_Name[0]]
                source_id = makeList(Vals[pos_SId[0]])
                node_id = makeList(Vals[pos_Node[0]])
                country_code = makeList(Vals[pos_CC[0]])

                lat = Vals[pos_lat[0]]
                if isinstance(lat, str):
                    lat = eval(lat)

                long = Vals[pos_long[0]]
                if isinstance(long, str):
                    long = eval(long)

                comment = Vals[pos_comm[0]]
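                # The param, method, uncertainty and tags columns hold Python
                # dict literals stored as strings; bare 'nan' tokens are
                # rewritten so that eval() re-creates them as float('nan').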
                param = eval(Vals[pos_para[0]].replace(': nan,',
                                                       ': float(\'nan\'),'))
                method = eval(Vals[pos_meth[0]].replace(
                    ': nan,', ': float(\'nan\'),'))
                uncertainty = eval(Vals[pos_unce[0]].replace(
                    ': nan,', ': float(\'nan\'),'))
                tags = eval(Vals[pos_tags[0]].replace(': nan,',
                                                      ': float(\'nan\'),'))

                Grid.__dict__[key].append(K_Component.__dict__[key](
                    id=id,
                    name=name,
                    source_id=source_id,
                    node_id=node_id,
                    country_code=country_code,
                    param=param,
                    lat=lat,
                    long=long,
                    method=method,
                    uncertainty=uncertainty,
                    tags=tags,
                    comment=comment))
                count = count + 1
                if count >= NumDataSets:
                    break
        else:
            Grid.__dict__[key] = []

    return Grid
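# A minimal usage sketch for the CSV reader above (not part of the original
# module): it assumes the Gas_<Component>.csv files live under 'Eingabe/CSV/'
# relative to the current working directory.
if __name__ == '__main__':
    Grid = read(RelDirName='Eingabe/CSV/', NumDataSets=10)
    for key in K_Netze.NetComp().CompLabels():
        print(key, len(Grid.__dict__[key]))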
Exemplo n.º 17
0
def Graph2Netz(G_Set_Sum):
    """ Creation of a Netz from a networkx network 
    
    \n.. comments:
    Input:
        G_Set_Sum   Network of type networkx
    Return:
        G_Netz      Netz of type K_Netze.NetComp
    """

    G_Netz = K_Netze.NetComp()
    Pipe = []
    Nodes = []

    for node in G_Set_Sum.nodes():
        id = G_Set_Sum.node[node]['id'][0]
        lat = G_Set_Sum.node[node]['pos'][1]
        long = G_Set_Sum.node[node]['pos'][0]
        country_code = getAttrib(G_Set_Sum.node[node], 'country_code')
        param = getAttrib(G_Set_Sum.node[node], 'param', 'param')
        source_id = getAttrib(G_Set_Sum.node[node], 'source_id', id)
        node_id = getAttrib(G_Set_Sum.node[node], 'node_id', id)
        name = getAttrib(G_Set_Sum.node[node], 'name', id)

        Nodes.append(
            K_Component.Nodes(id=id,
                              name=name,
                              source_id=source_id,
                              node_id=node_id,
                              long=long,
                              lat=lat,
                              country_code=country_code,
                              param=param))

    G_Netz.Nodes = Nodes

    for edge in G_Set_Sum.edges():
        for xx in range(len(G_Set_Sum[edge[0]][edge[1]])):
            id = G_Set_Sum[edge[0]][edge[1]][xx]['id'][0]
            latS = G_Set_Sum.node[edge[0]]['pos'][1]
            longS = G_Set_Sum.node[edge[0]]['pos'][0]
            latE = G_Set_Sum.node[edge[1]]['pos'][1]
            longE = G_Set_Sum.node[edge[1]]['pos'][0]

            country_codeS = G_Set_Sum.node[edge[0]]['country_code']
            country_codeE = G_Set_Sum.node[edge[1]]['country_code']
            param = getAttrib(G_Set_Sum[edge[0]][edge[1]][xx], 'param',
                              'param')
            source_id = getAttrib(G_Set_Sum[edge[0]][edge[1]][xx], 'source_id',
                                  id)
            node_id = [str(edge[0]), str(edge[1])]
            name = getAttrib(G_Set_Sum[edge[0]][edge[1]][xx], 'name', id)

            Pipe.append(
                K_Component.PipeSegments(
                    id=id,
                    name=name,
                    source_id=source_id,
                    node_id=node_id,
                    long=[longS, longE],
                    lat=[latS, latE],
                    country_code=[country_codeS, country_codeE]))

    G_Netz.PipeSegments = Pipe

    return G_Netz
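# A minimal usage sketch for Graph2Netz (not part of the original module).
# Coordinates and ids are illustrative; note that the attribute access via
# G.node[...] in the function above assumes networkx < 2.4, where Graph.node
# is still available as an alias for Graph.nodes.
if __name__ == '__main__':
    import networkx as nx

    G = nx.MultiGraph()
    G.add_node('N_0', id=['N_0'], pos=(6.95, 50.94), country_code='DE')
    G.add_node('N_1', id=['N_1'], pos=(8.68, 50.11), country_code='DE')
    G.add_edge('N_0', 'N_1', id=['PS_0'])

    Netz = Graph2Netz(G)
    print(len(Netz.Nodes), 'nodes and', len(Netz.PipeSegments),
          'pipe segments')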