Example #1
def CreateAreaFromWells(new_route_name, well_list, Update_User):
    from Model import ModelLayer as m
    from Model import QueryFile as qf
    from Model import BPXDatabase as bpx
    from datetime import datetime
    import pandas as pd

    Success = True
    Messages = []

    try:
        WellQuery = qf.EDWKeyQueryFromWellName(well_list)
        obj = bpx.GetDBEnvironment('ProdEDW', 'OVERRIDE')

        wells, wells_df = obj.Query(WellQuery)
        for idx, well in wells_df.iterrows():
            WellName = well['WellName']
            corpID = well['CorpID']
            AggregateRowObj = m.AreaAggregationRow(new_route_name, WellName,
                                                   corpID, '')
            Row_Success, Message = AggregateRowObj.Write(
                Update_User, datetime.now())

            if not Row_Success:
                Messages.append(Message)

    except Exception as ex:
        Success = False
        Messages.append(
            'Error during the creation of the area from well list. ' + str(ex))

    return Success, Messages
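A minimal usage sketch (not from the original source): the route name, well names, and user below are placeholders; the function returns a success flag plus any per-row error messages.

ok, messages = CreateAreaFromWells('West Area 01', ['WELL 1H', 'WELL 2H'], 'jdoe')
if not ok:
    for msg in messages:
        print(msg)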
Example #2
def ImportGFOFromDB2018(start_date, end_date, WellFlac=['ALL']):
    from Model import BPXDatabase as bpxdb
    from Model import QueryFile as qf
    import pandas as pd
    from datetime import datetime

    return_df = pd.DataFrame()
    Success = True
    Messages = []
    try:
        if isinstance(start_date, datetime):
            start_date = '\'' + start_date.strftime('%Y-%m-%d %H:%M:%S') + '\''

        if isinstance(end_date, datetime):
            end_date = '\'' + end_date.strftime('%Y-%m-%d %H:%M:%S') + '\''

        TeamOpsObj = bpxdb.GetDBEnvironment('OnPrem', 'OVERRIDE')
        query = qf.GetGFOFromEastDB2018(WellFlac, start_date, end_date)
        results = TeamOpsObj.Query(query)
        return_df = results[1]
    except Exception as ex:
        Messages.append(
            'Error retrieving the GFO data from the desired table. ' + str(ex))
        Success = False

    return return_df, Success, Messages
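A hedged usage sketch: because the function quotes datetime arguments itself, callers may pass either datetime objects or pre-quoted date strings. The date range and well flac below are placeholders.

from datetime import datetime

gfo_df, ok, messages = ImportGFOFromDB2018(datetime(2018, 1, 1),
                                           datetime(2018, 12, 31),
                                           WellFlac=['12345'])
if ok:
    print(gfo_df.head())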
Example #3
def ImportAriesByScenario(scenarioName,
                          start_date,
                          end_date,
                          Area,
                          CorpID=['ALL']):
    from Model import BPXDatabase as bpxdb
    from Model import QueryFile as qf
    import pandas as pd

    #Create ODS and EDW objects
    Success = True
    Messages = []
    combined_df = pd.DataFrame()
    try:
        ODSobj = bpxdb.GetDBEnvironment('ProdODS', 'OVERRIDE')
        EDWobj = bpxdb.GetDBEnvironment('ProdEDW', 'OVERRIDE')

        scenario_query = qf.ScenarioQuery(scenarioName, CorpID, start_date,
                                          end_date)
        results = ODSobj.Query(scenario_query)

        #Extract CorpIDs from the results and concatenate into an 'IN' SQL clause
        corpid_list = []
        for result in results[0]:
            if not result['CorpID'] in corpid_list:
                corpid_list.append(result['CorpID'])

        if None in corpid_list:
            corpid_list.remove(None)

        key_query = qf.EDWKeyQueryFromCorpID(corpid_list, Area)
        key_results = EDWobj.Query(key_query)

        #Join the key_results to the Aries results
        combined_df = pd.merge(results[1],
                               key_results[1],
                               on='CorpID',
                               how='right')

    except Exception as ex:
        Messages.append('Error on Import from Aries. ' + str(ex))
        Success = False

    return combined_df, Success, Messages
Example #4
def SOHA_WriteGasNettingFactorsFromDB(Update_User, Update_Date, wellnames=[]):
    from Model import BPXDatabase as bpx
    from Model import QueryFile as qf
    from Model import ModelLayer as m
    import datetime as datetime

    Success = True
    Messages = []

    try:
        config = m.GetConfig()
        DBObj = bpx.BPXDatabase(config['server'], config['database'],
                                config['UID'])
        TeamOpsObj = bpx.GetDBEnvironment('OnPrem', 'OVERRIDE')
        EDWObj = bpx.GetDBEnvironment('ProdEDW', 'OVERRIDE')

        #Get Well List of required netting values from data that is already in database.
        query = qf.GetNettingFactorsfromDB(wellnames)
        res, res_df = TeamOpsObj.Query(query)

        count = 1
        for idx, item in res_df.iterrows():
            wellquery = qf.EDWKeyQueryFromWellName([item['WellName']])
            res, well_row = EDWObj.Query(wellquery)
            if not well_row.empty:
                corpID = well_row['CorpID'].values[0]
                NettingObj = m.GasNettingRow(item['WellName'], corpID,
                                             item['NF'],
                                             item['FirstSalesDateInput'],
                                             DBObj)
                Success, Message = NettingObj.Write(Update_User, Update_Date)
                if not Success:
                    Messages.append(Message)

            callprogressbar(count, res_df.shape[0])
            count = count + 1

    except Exception as ex:
        Success = False
        Messages.append('Error during write of netting factors to DB. ' +
                        str(ex))

    return Success, Messages
Example #5
def ImportActuals(corpID_list, start_date, end_date, LEName=''):
    from Model import BPXDatabase as bpxdb
    from Model import QueryFile as qf
    from Model import ModelLayer as m
    from datetime import datetime
    import pandas as pd

    Success = True
    Messages = []
    Actuals = []
    try:
        if isinstance(start_date, datetime):
            start_date = '\'' + start_date.strftime('%Y-%m-%d %H:%M:%S') + '\''

        if isinstance(end_date, datetime):
            end_date = '\'' + end_date.strftime('%Y-%m-%d %H:%M:%S') + '\''

        EDWobj = bpxdb.GetDBEnvironment('ProdEDW', 'OVERRIDE')
        query = qf.GetActualsFromDB(corpID_list, start_date, end_date)
        results = EDWobj.Query(query)

        Actuals = results[1]

        #If an LE name is supplied, scan the production adjustments table for any overrides of
        #potentially incorrect production values from the EDH/EDW tables
        if LEName:
            ProdAdjObj = m.ProductionAdjustments('', [LEName], [], [])
            ProdAdjsRows, Success, Message = ProdAdjObj.ReadTable()
            if not Success:
                Messages.append(Message)

            #Query the production adjustments and see if there are any well entries for the given LE
            if len(ProdAdjsRows) > 0:
                #Loop through the results of the above query.
                for row in ProdAdjsRows:
                    #Query the Actuals dataframe for the well and the dates and then update the production value (as long as value is not null)
                    date = row.Date_Key.date()
                    ActualsRow = Actuals.query(
                        'CorpID == @row.CorpID and Date_Key == @date')
                    if not ActualsRow.empty:
                        idx = ActualsRow.index.values[0]
                        if row.AdjustedGasProduction:
                            Actuals.loc[idx, 'Gas'] = row.AdjustedGasProduction
                        if row.AdjustedOilProduction:
                            Actuals.loc[idx, 'Oil'] = row.AdjustedOilProduction
                        if row.AdjustedWaterProduction:
                            Actuals.loc[idx,
                                        'Water'] = row.AdjustedWaterProduction

    except Exception as ex:
        Messages.append('Error during query for actuals data. ' + str(ex))
        Success = False

    return Actuals, Success, Messages
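A short usage sketch with placeholder CorpIDs and dates: passing an LE name applies any production-adjustment overrides, while omitting it returns the raw EDW actuals.

from datetime import datetime

raw_df, ok, msgs = ImportActuals(['0123456789'], datetime(2020, 1, 1),
                                 datetime(2020, 3, 31))
adj_df, ok, msgs = ImportActuals(['0123456789'], datetime(2020, 1, 1),
                                 datetime(2020, 3, 31), LEName='2020-03 LE')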
Example #6
def CreateAreaWellsFromRoute(new_route_name, db_route_name, Update_User):
    from Model import ModelLayer as m
    from Model import QueryFile as qf
    from Model import BPXDatabase as bpx
    from datetime import datetime
    import pandas as pd

    Success = True
    Messages = []

    try:

        ODSObj = bpx.GetDBEnvironment('ProdODS', 'OVERRIDE')
        EDWObj = bpx.GetDBEnvironment('ProdEDW', 'OVERRIDE')
        query = qf.RouteQuery([db_route_name])
        results = ODSObj.Query(query)

        for idx, row in results[1].iterrows():
            wellflac_query = qf.EDWKeyQueryFromWellFlac([row['wellflac']])
            corpIDres = EDWObj.Query(wellflac_query)
            if not corpIDres[1].empty:
                corpID = corpIDres[1]['CorpID'].iloc[0]
                AggregateRowObj = m.AreaAggregationRow(new_route_name,
                                                       row['name'], corpID, '')
                Row_Success, Message = AggregateRowObj.Write(
                    Update_User, datetime.now())

                if not Row_Success:
                    Messages.append(Message)
            else:
                Messages.append('Missing well entry in key database EDW: ' +
                                row['name'])

    except Exception as ex:
        Success = False
        Messages.append(
            'Error during Area Aggregation interface from Enbase. ' + str(ex))

    return Success, Messages
Example #7
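    # Excerpt: constructor of the Window class (a Qt-based GUI); QtGui and the
    # Window base class are assumed to be imported/defined at module level in
    # the original file.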
    def __init__(self, app):
        from Model import BPXDatabase

        super(Window, self).__init__()
        [self.screenheight, self.screenwidth] = self.GetScreenDimensions()
        self.setGeometry(self.screenheight / 9, self.screenwidth / 9,
                         0.7 * self.screenheight, 0.7 * self.screenwidth)
        self.setWindowTitle('BPX LE Summary')
        self.setWindowIcon(QtGui.QIcon('bplogo.png'))
        app.setStyle("Fusion")
        self.DBobj = BPXDatabase.GetDBEnvironment('ProdEDW', 'OVERRIDE')
        self.palette = self.GetPalette()
        self.Format()
Example #8
def GetWellandCorpID(WellName, CorpID):
    from Model import QueryFile as qf
    from Model import BPXDatabase as bpx

    #Check CorpID if Wellname is passed
    if not CorpID and WellName:
        CorpID_query = qf.EDWKeyQueryFromWellName([WellName])
        EDWObj = bpx.GetDBEnvironment('ProdEDW', 'OVERRIDE')
        res, res_df = EDWObj.Query(CorpID_query)

        if not res_df.empty:
            CorpID = res_df['CorpID'].values[0]

    #Check WellName if CorpID not passed
    if not WellName and CorpID:
        WellName_Query = qf.EDWKeyQueryFromCorpID([CorpID], '')
        EDWObj = bpx.GetDBEnvironment('ProdEDW', 'OVERRIDE')
        res, res_df = EDWObj.Query(WellName_Query)

        if not res_df.empty:
            WellName = res_df['WellName'].values[0]

    return WellName, CorpID
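A brief sketch of the bidirectional lookup, using placeholder identifiers: whichever argument is empty is resolved from the EDW key table, and both values are returned.

name, corpid = GetWellandCorpID('SMITH 1H', '')      # resolve CorpID from the well name
name, corpid = GetWellandCorpID('', '0123456789')    # resolve the well name from CorpID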
Example #9
def GetWedgeData(CorpID, SuppressMessages):
    from Model import QueryFile as qf
    from Model import BPXDatabase as bpx

    from datetime import datetime, timedelta
    import pandas as pd

    Messages = []

    #Get Wedge from First Production Date
    #If an area is passed in as an aggregate, the first production date will be the oldest first production date of its associated well list.
    well_list = GetFullWellList([CorpID])
    first_sales_date_query = qf.FirstProductionDateQuery(well_list)
    first_results = bpx.GetDBEnvironment(
        'ProdEDW', 'OVERRIDE').Query(first_sales_date_query)

    msg = 'Skipped input due to lack of first production date. ' + CorpID
    Wedge = ''
    if not first_results[1].empty:
        #check current year and determine if the year of the first production is last year, this year, or base (anything prior to last year)
        prod_date = first_results[1]['FirstProductionDate'].values[0]
        prod_date = pd.to_datetime(prod_date)

        if prod_date:
            prod_year = prod_date.year
            this_year = datetime.now().year
            last_year = (datetime.now() - timedelta(days=365)).year

            if prod_year == this_year:
                Wedge = str(this_year) + ' NWD'
            elif prod_year == last_year:
                Wedge = str(last_year) + ' NWD'
            else:
                Wedge = 'Base'
        else:
            if not SuppressMessages:
                print(msg)
            Messages.append(msg)

    else:
        Messages.append(msg)
        if not SuppressMessages:
            print(msg)

    return Wedge, Messages
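A usage sketch with a placeholder CorpID: wells whose first production date falls in the current or previous calendar year are labeled as that year's new-well ('NWD') wedge, and older wells are labeled 'Base'.

wedge, messages = GetWedgeData('0123456789', SuppressMessages=True)
print(wedge)  # e.g. '2023 NWD' or 'Base'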
Example #10
def WriteDefaultMultipliers(LE_Name, DefaultValue, Update_User, Update_Date,
                            SuppressMessages):
    import datetime as datetime
    from Model import BPXDatabase as bpx
    from Model import ModelLayer as m

    Success = True
    Messages = []

    try:
        config = m.GetConfig()
        DBObj = bpx.BPXDatabase(config['server'], config['database'],
                                config['UID'])

        #Query the LE results
        LE_query = 'select * from [LEForecastDatabase].[dbo].[LE_Data] where HeaderName = \'' + LE_Name + '\''
        res, df = DBObj.Query(LE_query)

        count = 1
        for idx, row in df.iterrows():
            FracHitObj = m.FracHitMultipliersRow(row['HeaderName'],
                                                 row['CorpID'],
                                                 row['Date_Key'],
                                                 str(DefaultValue), DBObj)
            Success, Message = FracHitObj.Write(Update_User, Update_Date)
            if not Success:
                Messages.append(Message)

            if not SuppressMessages:
                callprogressbar(count, df.shape[0])
            count = count + 1

    except Exception as ex:
        Success = False
        Messages.append(
            'Error during write of default frac hit multipliers. ' + str(ex))

    return Success, Messages
Example #11
def GetActuals(WellorArea,
               Wedge,
               start_date,
               end_date,
               LEName='',
               Adjusted=False,
               Phase='Gas'):
    from Model import ModelLayer as m
    import pandas as pd
    from Controller import SummaryModule as s
    from Model import ImportUtility as i
    from Model import QueryFile as qf
    from Model import BPXDatabase as bpx

    Success = True
    Messages = []
    ActualProduction = []
    try:
        WellList = []
        if WellorArea:
            WellList = s.GetFullWellList(WellorArea)
        if Wedge:
            wedge_list, Success, Message = GetWellorAreaByWedge(Wedge)
            if not Success:
                Messages.append('Error finding wells associated with input Wedge. ')
            else:
                WellList.extend(wedge_list)


        EDWobj = bpx.GetDBEnvironment('ProdEDW', 'OVERRIDE')
        corpid_query = qf.EDWKeyQueryFromWellName(WellList)
        corpid_list = EDWobj.Query(corpid_query)
        corpid_list = list(corpid_list[1]['CorpID'])
        if LEName and Adjusted == True:
            actuals_df, Success, Message = i.ImportActuals(corpid_list, pd.to_datetime(start_date), pd.to_datetime(end_date), LEName)   
        else:
            actuals_df, Success, Message = i.ImportActuals(corpid_list, pd.to_datetime(start_date), pd.to_datetime(end_date), '')
        
        if not Success:
            Messages.append(Message)
        else:
            dates = actuals_df['Date_Key'].unique()
            prod_array = []
            date_array = []
            if not actuals_df.empty:
                if Phase == 'Gas':
                    for date in dates:
                        results = actuals_df.query('Date_Key == @date')
                        prod_array.append(results['Gas'].sum())
                        date_array.append(date)
                    ActualProduction = ProductionData(date_array, prod_array, Phase,'scf')
                elif Phase == 'Water':
                    for date in dates:
                        results = actuals_df.query('Date_Key == @date')
                        prod_array.append(results['Water'].sum())
                        date_array.append(date)
                    ActualProduction = ProductionData(date_array, prod_array, Phase,'bbl')
                elif Phase == 'Oil':
                    for date in dates:
                        results = actuals_df.query('Date_Key == @date')
                        prod_array.append(results['Oil'].sum())
                        date_array.append(date)
                    ActualProduction = ProductionData(date_array, prod_array, Phase,'bbl')

    except Exception as ex:
        Success = False
        Messages.append('Error during import of Actual production data. ' + str(ex))

    return ActualProduction, Success, Messages
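A hedged call sketch (placeholder inputs): actuals can be pulled for a well or area plus an optional wedge, and the returned ProductionData object carries the summed daily values for the requested phase.

prod, ok, msgs = GetActuals('SMITH 1H', '', '2020-01-01', '2020-03-31',
                            LEName='2020-03 LE', Adjusted=True, Phase='Gas')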
Example #12
def GetForecastProduction(ForecastName, Wedge, WellorArea, Phase='Gas'):
    from datetime import datetime
    from Model import ModelLayer as m
    import pandas as pd
    from Model import BPXDatabase as bpx
    from Model import QueryFile as qf
    
    #Get the production values from the LE table by the input criteria
    Success = True
    Messages = []
    ProductionDataObj = []
    try:

        #Get all CorpIDs from Well or Area passed
        well_list = []
        corpid_list = []
        if WellorArea:
            well_list.append(WellorArea)
            EDWobj = bpx.GetDBEnvironment('ProdEDW', 'OVERRIDE')
            corpid_query = qf.EDWKeyQueryFromWellName(well_list)
            corpid_list = EDWobj.Query(corpid_query)
            corpid_list = list(corpid_list[1]['CorpID'])
        if Wedge:
            wedge_list, Success, Message = GetWellorAreaByWedge(Wedge)
            if not Success:
                Messages.append(Message)
            else:
                well_list.extend(wedge_list)

        ForecastDataRowObj = m.ForecastData('', [ForecastName], corpid_list, [])
        ForecastData, Success, Message = ForecastDataRowObj.ReadTable()
        if not Success:
            Messages.append(Message)
        else:
            prod_Forecast_rows = pd.DataFrame([vars(s) for s in ForecastData])

            dates = prod_Forecast_rows['Date_Key'].unique()
            prod_array = []
            date_array = []
            if not prod_Forecast_rows.empty:
                if Phase == 'Gas':
                    for date in dates:
                        results = prod_Forecast_rows.query('Date_Key == @date')
                        prod_array.append(results['Gas_Production'].sum())
                        date_array.append(date)
                    ProductionDataObj = ProductionData(date_array, prod_array, Phase,'scf')
                elif Phase == 'Water':
                    for date in dates:
                        results = prod_Forecast_rows.query('Date_Key == @date')
                        prod_array.append(results['Water_Production'].sum())
                        date_array.append(date)
                    ProductionDataObj = ProductionData(date_array, prod_array, Phase,'bbl')
                elif Phase == 'Oil':
                    for date in dates:
                        results = prod_Forecast_rows.query('Date_Key == @date')
                        prod_array.append(results['Oil_Production'].sum())
                        date_array.append(date)
                    ProductionDataObj = ProductionData(date_array, prod_array, Phase,'bbl')

    except Exception as ex:
        Success = False
        Messages.append('Error during collection of production data. ' + str(ex))

    return ProductionDataObj, Success, Messages
Example #13
def FracHatMitigation(LEName, EastWestFracHitRadius, NorthSouthFracHitRadius,
                      Update_User):
    from Model import ModelLayer as m
    from Model import QueryFile as qf
    from Model import BPXDatabase as bpx
    import pandas as pd
    from datetime import datetime, timedelta
    import numpy as np

    Success = True
    Messages = []
    try:
        #Find Frac Date of Upcoming Wells

        #Get the beginning and end date of the LE being evaluated
        LEDataObj = m.LEData('', [LEName], [], [])
        LErows, Success, Message = LEDataObj.ReadTable()

        if not Success:
            Messages.append(Message)
        elif len(LErows) < 1:
            Messages.append('No LE Data exists for the given LE Name.')
        else:
            LErows_df = pd.DataFrame([vars(s) for s in LErows])
            start_date = LErows_df['Date_Key'].min()
            end_date = LErows_df['Date_Key'].max()

            s_start_date = datetime.strftime(start_date, '%m/%d/%Y')
            s_end_date = datetime.strftime(end_date, '%m/%d/%Y')

            #Query the Drill Schedule (and potentially other data sources) for upcoming drills

            new_drill_query = qf.GetActenumDrillScheduleData(
                s_start_date, s_end_date)
            DBObj = bpx.GetDBEnvironment('ProdEDH', 'OVERRIDE')
            dso_results = DBObj.Query(new_drill_query)
            if not Success:
                Messages.append(Message)

            else:
                dso_df = dso_results[1]

                for nd_idx, nd_row in dso_df.iterrows():
                    #Get latitude and longitude values
                    surface_lat = nd_row['SurfaceLatitude']
                    surface_long = nd_row['SurfaceLongitude']
                    if surface_long > 0:
                        surface_long = 0 - surface_long
                    bh_lat = nd_row['BottomHoleLatitude']
                    bh_long = nd_row['BottomHoleLongitude']
                    if bh_long > 0:
                        bh_long = 0 - bh_long
                    stages = nd_row['ExpectedStages']
                    name = nd_row['WellName']
                    frac_start = nd_row['StartFracDate']
                    frac_end = nd_row['EndFracDate']

                    #Get wells within certain distance
                    FracHitRadius = max(EastWestFracHitRadius,
                                        NorthSouthFracHitRadius)
                    from_surface_query = qf.GetWellsWithinBearing(
                        surface_lat, surface_long, FracHitRadius)
                    from_bottom_query = qf.GetWellsWithinBearing(
                        bh_lat, bh_long, FracHitRadius)

                    surface_res = DBObj.Query(from_surface_query)
                    bh_res = DBObj.Query(from_bottom_query)

                    if not surface_res[1].empty:
                        all_res = surface_res[1]
                        if not bh_res[1].empty:
                            all_res = pd.merge(surface_res[1], bh_res[1])
                    elif not bh_res[1].empty:
                        all_res = bh_res[1]
                    else:
                        all_res = pd.DataFrame()

                    stages = int(stages)

                    #Get interpolated bearings
                    interpolated_bearings = InterpolateBearing(
                        [bh_lat, bh_long], [surface_lat, surface_long], stages
                    )  #reversed since frac will begin at bottom hole location
                    interpolated_dates = InterpolateDates(
                        frac_start, frac_end, stages)
                    interp = np.column_stack(
                        [interpolated_bearings, interpolated_dates])

                    if all_res.empty:
                        Messages.append(
                            'No wells within given frac hit radius from ' +
                            name + ' to apply multipliers.')

                    #Loop through all the well results
                    for ex_idx, ex_row in all_res.iterrows():
                        corpid = ex_row['UWI']
                        wellname = ex_row['WELL_NAME']
                        dates = []
                        ex_surface_lat = float(ex_row['SurfaceLatitude'])
                        ex_surface_long = float(ex_row['SurfaceLongitude'])
                        if ex_surface_long > 0:
                            ex_surface_long = 0 - ex_surface_long
                        ex_bh_lat = float(ex_row['BottomHoleLatitude'])
                        ex_bh_long = float(ex_row['BottomHoleLongitude'])
                        if ex_bh_long > 0:
                            ex_bh_long = 0 - ex_bh_long

                        ex_interp_bearings = InterpolateBearing(
                            [ex_surface_lat, ex_surface_long],
                            [ex_bh_lat, ex_bh_long], stages)

                        # cycle through each item in the interpolated array and calculate distance to points in existing well
                        for point in interp:
                            for bearing in ex_interp_bearings:
                                distance = CalculateDistanceFromBearings(
                                    [point[0], point[1]], bearing)
                                azimuth = CalculateAzimuthFromBearings(
                                    [point[0], point[1]], bearing)

                                if (azimuth >= 25 and azimuth <= 155) or (
                                        azimuth >= 205 and azimuth <= 335):
                                    #Evaluate against EastWestFrac radius
                                    if distance < EastWestFracHitRadius:
                                        dates.append(point[2])
                                else:
                                    if distance < NorthSouthFracHitRadius:
                                        dates.append(point[2])

                        dates = list(dict.fromkeys(dates))
                        #Go through the dates and add the ramp up, ramp up schedule dates and multipliers
                        if len(dates) > 0:
                            ramp_up = [1, 1, 0.5]
                            ramp_down = [0.5, 0.75, 1]

                            min_date = min(dates)
                            max_date = max(dates)
                            ramp_down_dates = [(min_date - timedelta(days=3)),
                                               (min_date - timedelta(days=2)),
                                               (min_date - timedelta(days=1))]
                            ramp_up_dates = [(max_date + timedelta(days=3)),
                                             (max_date + timedelta(days=2)),
                                             (max_date + timedelta(days=1))]

                            shut_ins = [0] * len(dates)
                            all_multipliers = []
                            all_multipliers.extend(ramp_up)
                            all_multipliers.extend(shut_ins)
                            all_multipliers.extend(ramp_down)

                            all_dates = []
                            all_dates.extend(ramp_up_dates)
                            all_dates.extend(dates)
                            all_dates.extend(ramp_down_dates)

                            combined_date_multiplier = np.column_stack(
                                [all_dates, all_multipliers])

                            for date_multiplier in combined_date_multiplier:
                                #Search DB for frac hit multiplier row
                                date = datetime.strftime(
                                    date_multiplier[0], '%m/%d/%Y')
                                fhm_obj = m.FracHitMultipliers(
                                    '', [LEName], [corpid], [date])
                                fhm_rows, Success, Message = fhm_obj.ReadTable(
                                )

                                if Success and len(fhm_rows) > 0:
                                    #if the row exists, then update
                                    row = fhm_rows[0]
                                    row.Multiplier = date_multiplier[1]
                                    Success, Message = row.Update(
                                        Update_User, datetime.now())
                                    if not Success:
                                        Messages.extend(Message)
                                elif Success and len(fhm_rows) == 0:
                                    Messages.append('No entry exists for ' +
                                                    wellname + ' on ' + date +
                                                    ' for LE ' + LEName + '. ')
                                else:
                                    Messages.extend(Message)

    except Exception as ex:
        Success = False
        Messages.append(
            'Error during the automatic application of frac hit mitigation multipliers. '
            + str(ex))

    return Success, Messages
Example #14
def WriteLEFromTemplate(all_data_df,
                        InterpolationMethod,
                        LEName,
                        LE_Date,
                        Update_User,
                        IDCol='WellName'):
    from datetime import datetime, date
    import pandas as pd
    from Model import QueryFile as qf
    from Model import BPXDatabase as bpx
    from Model import ImportUtility as i

    Success = True
    Messages = []
    results = []
    try:
        #Data frame must have the same structure as the output from the 'Read From Excel' function:
        #'CorpID', 'WellName', 'Wedge', 'Date', 'Gas', 'Oil', 'Water', 'OilNF', 'GasNF'

        wellname = ''

        if IDCol == 'CorpID':
            corpid_list = list(all_data_df['CorpID'].unique())
            corpid_query = qf.EDWKeyQueryFromCorpID(corpid_list)
            corpid_results, corpid_df = bpx.GetDBEnvironment(
                'ProdEDW', 'OVERRIDE').Query(corpid_query)
            well_list = list(corpid_df['WellName'].unique())

            well_query = 'CorpID == @corpid'
        else:
            well_list = list(all_data_df['WellName'].unique())
            well_query = 'WellName == @wellname'

        well_list = [i for i in well_list if i]

        for wellname in well_list:

            wellname, corpid = i.GetWellandCorpID(wellname, '')
            if not corpid:
                corpid = wellname

            data_df = all_data_df.query(well_query)

            row_count = 1
            if not data_df.empty:
                df_previous_row = (0, data_df.iloc[1])

                for idx, df_row in data_df.iterrows():
                    if InterpolationMethod == 'MonthlyRates':
                        if row_count == 1:
                            df_next_row = data_df.iloc[row_count]
                            results = InterpolateDailyRatesFromMonthlyRates(
                                CurrentMonthVal=df_row,
                                NextMonthVal=df_next_row,
                                GasProduction='Gas',
                                OilProduction='Oil')

                        elif row_count != data_df.shape[0] and row_count != 1:
                            df_next_row = data_df.iloc[row_count]
                            results = InterpolateDailyRatesFromMonthlyRates(
                                CurrentMonthVal=df_row,
                                NextMonthVal=df_next_row,
                                PreviousMonthVal=df_previous_row,
                                GasProduction='Gas',
                                OilProduction='Oil')

                        elif row_count == data_df.shape[0]:
                            results = InterpolateDailyRatesFromMonthlyRates(
                                CurrentMonthVal=df_row,
                                PreviousMonthVal=df_previous_row,
                                GasProduction='Gas',
                                OilProduction='Oil')

                    elif InterpolationMethod == 'MonthlyVolume':
                        if row_count == 1:
                            df_next_row = data_df.iloc[row_count]
                            results = InterpolateDailyRatesFromMonthlyVolumes(
                                CurrentMonthVal=df_row[1],
                                NextMonthVal=df_next_row)
                        else:
                            results = InterpolateDailyRatesFromMonthlyVolumes(
                                CurrentMonthVal=df_row[1],
                                PreviousMonthVal=df_previous_row[1])

                    elif InterpolationMethod == 'None':
                        results = ConvertNonInterpolatedResults(df_row)

                    Wedge, Message = i.GetWedgeData(corpid, True)
                    Success, Message = WriteInterpolatedLEToDB(
                        LEName, wellname, corpid, '', Wedge, LE_Date,
                        Update_User, results)
                    if not Success:
                        Messages.append(Message)

                    df_previous_row = df_row
                    row_count = row_count + 1

    except Exception as ex:
        Success = False
        Messages.append('Error during the writing of the LE from template. ' +
                        str(ex))

    return Success, Messages
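A minimal input sketch with placeholder data: the DataFrame must carry the columns listed in the comment above ('CorpID', 'WellName', 'Wedge', 'Date', 'Gas', 'Oil', 'Water', 'OilNF', 'GasNF'), and the interpolation method controls how monthly values are spread to daily rates.

import pandas as pd
from datetime import datetime

template_df = pd.DataFrame({
    'CorpID': ['0123456789', '0123456789'],
    'WellName': ['SMITH 1H', 'SMITH 1H'],
    'Wedge': ['Base', 'Base'],
    'Date': [datetime(2020, 1, 1), datetime(2020, 2, 1)],
    'Gas': [1000.0, 950.0], 'Oil': [50.0, 45.0], 'Water': [20.0, 18.0],
    'OilNF': [0.9, 0.9], 'GasNF': [0.85, 0.85]})

ok, msgs = WriteLEFromTemplate(template_df, 'MonthlyRates', '2020-03 LE',
                               datetime(2020, 3, 1), 'jdoe', IDCol='WellName')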
Example #15
def WriteAriesScenarioToDB(scenarioName,
                           ForecastName,
                           ForecastYear,
                           start_date,
                           end_date,
                           User,
                           Area,
                           GFO=False,
                           CorpID=['ALL']):
    from Model import ImportUtility as i
    from Model import BPXDatabase as bpxdb
    from Model import ModelLayer as m
    import datetime as dt

    Success = True
    Messages = []

    try:
        #Query the Aries database using import methods
        scenario_results, Success, Messages = i.ImportAriesByScenario(
            scenarioName, start_date, end_date, Area)

        #Create NF columns for oil and gas (replace nan with 0)
        scenario_results['OilNF'] = scenario_results[
            'C754'] / scenario_results['GasProduction']
        scenario_results['GasNF'] = scenario_results[
            'C753'] / scenario_results['OilProduction']

        scenario_results = scenario_results.fillna(0)

        #Obtain list from scenario query results
        CorpID_list = scenario_results['CorpID'].to_list()
        CorpID_list = list(set(CorpID_list))

        config = m.GetConfig()
        DBObj = bpxdb.BPXDatabase(config['server'], config['database'],
                                  config['UID'])

        #Linearly interpolate the data
        #Two segments: the previous month's mid average and the next month's mid average - interpolate against both to get the daily values.
        count = 1
        for corpID in CorpID_list:
            #Get the subset of results that match this wellflac
            corpid_scenario_df = scenario_results.query('CorpID == @corpID')
            corpid_scenario_df = corpid_scenario_df.sort_values(by=['Date'],
                                                                ascending=True)

            if corpid_scenario_df.shape[0] > 1:
                df_previous_row = (0, corpid_scenario_df.iloc[1])

                wellflac_count = 1
                header_corpID = ''
                for df_row in corpid_scenario_df.iterrows():

                    if wellflac_count == 1:
                        df_next_row = corpid_scenario_df.iloc[wellflac_count]
                        results = InterpolateDailyRatesFromMonthlyVolumes(
                            CurrentMonthVal=df_row[1],
                            NextMonthVal=df_next_row)
                    else:
                        results = InterpolateDailyRatesFromMonthlyVolumes(
                            CurrentMonthVal=df_row[1],
                            PreviousMonthVal=df_previous_row[1])

                    Success, Message = WriteInterpolatedForecastToDB(
                        df_row[1]['WellName'], corpID, ForecastName,
                        ForecastYear, scenarioName, GFO, User, results)

                    if not Success:
                        Messages.append(Message)
                        break

                    df_previous_row = df_row

                    wellflac_count = wellflac_count + 1

            callprogressbar(count, len(CorpID_list))
            count = count + 1

    except Exception as ex:
        Success = False
        Messages.append(
            'Failed to write the results from chosen scenario in Aries database. '
            + str(ex))

    return Success, Messages
Example #16
def SOHA_WriteInternalForecasttoDB(df,
                                   ForecastName,
                                   ForecastYear,
                                   Production_Column_Name,
                                   User,
                                   GFO=True):
    #Part of the soon-to-be-deprecated methods that convert SoHa internal GFO data to the standard format
    from Model import BPXDatabase as bpx
    from Model import ModelLayer as m
    import datetime as dt
    from Model import QueryFile as qf

    Success = True
    Messages = []

    try:
        config = m.GetConfig()
        DBObj = bpx.BPXDatabase(config['server'], config['database'],
                                config['UID'])
        EDWObj = bpx.GetDBEnvironment('ProdEDW', 'OVERRIDE')

        wellname_list = df['WellName'].unique()
        wellname_list = list(wellname_list)
        if '' in wellname_list:
            wellname_list.remove('')
        count = 1
        for name in wellname_list:
            monthly_df = df.query('WellName == @name')
            monthly_df = monthly_df.sort_values(by=['Date'], ascending=True)
            df_previous_row = (0, monthly_df.iloc[1])
            nettingFactor = monthly_df['NettingFactor'].values[0]
            well_count = 1
            header_corpid = ''
            for df_row in monthly_df.iterrows():
                if well_count == 1:
                    df_next_row = monthly_df.iloc[well_count]
                    results = InterpolateDailyRatesFromMonthlyRates(
                        CurrentMonthVal=df_row[1],
                        NextMonthVal=df_next_row,
                        GasRateField=Production_Column_Name)
                elif well_count != monthly_df.shape[0] and well_count != 1:
                    df_next_row = monthly_df.iloc[well_count]
                    results = InterpolateDailyRatesFromMonthlyRates(
                        CurrentMonthVal=df_row[1],
                        NextMonthVal=df_next_row,
                        PreviousMonthVal=df_previous_row[1],
                        GasRateField=Production_Column_Name)
                elif well_count == monthly_df.shape[0]:
                    results = InterpolateDailyRatesFromMonthlyRates(
                        CurrentMonthVal=df_row[1],
                        PreviousMonthVal=df_previous_row[1],
                        GasRateField=Production_Column_Name)

                for row in results.iterrows():
                    corpid_query = qf.EDWKeyQueryFromWellName([name])
                    corpid_results = EDWObj.Query(corpid_query)
                    if not corpid_results[1].empty:
                        CorpID = corpid_results[1].at[0, 'CorpID']
                    else:
                        CorpID = name
                    WellName = name
                    Update_Date = dt.datetime.now().strftime(
                        "%Y-%m-%d %H:%M:%S")
                    Update_User = User
                    if header_corpid != CorpID:
                        #Create Header entry
                        header_corpid = CorpID
                        ForecastHeaderObj = m.ForecastHeaderRow(
                            WellName, CorpID, ForecastName, ForecastYear, '',
                            [], GFO, DBObj)
                        Success, Message = ForecastHeaderObj.Write(
                            Update_User, Update_Date)
                        if not Success:
                            Messages.append(Message)

                    Date_Key = row[1]['Date'].strftime('%m/%d/%Y')
                    Gas_Production = row[1]['GasProduction']
                    GasNF = row[1]['GasNF']
                    if Gas_Production >= 0 and Date_Key:
                        ForecastDataObj = m.ForecastDataRow(
                            ForecastName, CorpID, Date_Key, Gas_Production, 0,
                            0, GasNF, 0, 0, DBObj)
                        Success, Message = ForecastDataObj.Write(
                            Update_User, Update_Date)
                        if not Success:
                            Messages.append(Message)

                    df_previous_row = df_row

                well_count = well_count + 1

            callprogressbar(count, len(wellname_list))
            count = count + 1

    except Exception as ex:
        Success = False
        Messages.append('Error writing Forecast to Database. ' + str(ex))

    return Success, Messages