def fetchSection1_11_solar_cContext(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> ISection_1_11_solar_c:
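    # build the 'so far highest solar generation' table: one row per RE
    # constituent that has solar capacity configured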
    constituentsInfos = getREConstituentsMappings()
    mRepo = MetricsDataRepo(appDbConnStr)

    soFarHighestAllEntityVals = mRepo.getSoFarHighestAllEntityData(
        'soFarHighestSolarGen', startDt)
    soFarHighestSolarLookUp = {}
    for v in soFarHighestAllEntityVals:
        soFarHighestSolarLookUp[v['constituent']] = {
            'value': v['data_value'], 'ts': v['data_time']}

    dispRows: List[ISoFarHighestDataRow] = []
    for cIter in range(len(constituentsInfos)):
        constInfo = constituentsInfos[cIter]

        if pd.isna(constInfo['solarCapacity']):
            continue

        highestGenerationDateTime = soFarHighestSolarLookUp[constInfo['entity_tag']]['ts']

        const_display_row: ISoFarHighestDataRow = {
            'entity': constInfo['display_name'],
            'capacityMW': constInfo['solarCapacity'],
            'generationMW': soFarHighestSolarLookUp[constInfo['entity_tag']]['value'],
            'highestGenerationMWDateStr': dt.datetime.strftime(highestGenerationDateTime, "%d.%m.%Y"),
            'highestGenerationMWTimeStr': dt.datetime.strftime(highestGenerationDateTime, "%H:%M")
        }
        dispRows.append(const_display_row)

    secData: ISection_1_11_solar_c = {"so_far_hig_solar_gen": dispRows}
    return secData
def fetchIexGdamTableContext(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> ISection_1_4:
    mRepo = MetricsDataRepo(appDbConnStr)
    
    # get IEX GDAM block wise MCV and MCP data for the range between start date and end date
    iexGdamMcvVals = mRepo.getIexGdamBlockWiseData('MCV (MW)', startDt, endDt)
    iexGdamMcpVals = mRepo.getIexGdamBlockWiseData('MCP (Rs/MWh) ', startDt, endDt)
    iexGdamMcvDf = pd.DataFrame(iexGdamMcvVals)
    iexGdamMcpDf = pd.DataFrame(iexGdamMcpVals)
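    # aggregate the block wise values per day: average/min/max MCV in MW and
    # min/max/average MCP, scaled from Rs/MWh to Rs/kWh by dividing by 1000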

    tableDf = iexGdamMcvDf.groupby(['time_stamp']).mean()
    tableDf = tableDf.rename(columns={'data_value': 'avg_mcv_data'})
    tableDf.reset_index(inplace = True)
    minDf = iexGdamMcvDf.groupby(['time_stamp']).min()
    minDf.reset_index(inplace = True)
    minDf = minDf.rename(columns={'data_value': 'min_mcv_data'})
    tableDf = tableDf.merge(minDf[['min_mcv_data', 'time_stamp']], on = 'time_stamp')
    maxDf = iexGdamMcvDf.groupby(['time_stamp']).max()
    maxDf = maxDf.rename(columns={'data_value': 'max_mcv_data'})
    maxDf.reset_index(inplace = True)
    tableDf = tableDf.merge(maxDf[['max_mcv_data', 'time_stamp']], on = 'time_stamp')

    minMcpDf = iexGdamMcpDf.groupby(['time_stamp']).min()
    minMcpDf.reset_index(inplace = True)
    minMcpDf = minMcpDf.rename(columns={'data_value': 'min_mcp_data'})
    minMcpDf['min_mcp_data'] = minMcpDf['min_mcp_data']/1000
    tableDf = tableDf.merge(minMcpDf[['min_mcp_data', 'time_stamp']], on = 'time_stamp')
    maxMcpDf = iexGdamMcpDf.groupby(['time_stamp']).max()
    maxMcpDf.reset_index(inplace = True)
    maxMcpDf = maxMcpDf.rename(columns={'data_value': 'max_mcp_data'})
    maxMcpDf['max_mcp_data'] = maxMcpDf['max_mcp_data']/1000
    tableDf = tableDf.merge(maxMcpDf[['max_mcp_data', 'time_stamp']], on = 'time_stamp')
    avgMcpDf = iexGdamMcpDf.groupby(['time_stamp']).mean()
    avgMcpDf.reset_index(inplace = True)
    avgMcpDf = avgMcpDf.rename(columns={'data_value': 'avg_mcp_data'})
    avgMcpDf['avg_mcp_data'] = avgMcpDf['avg_mcp_data']/1000
    tableDf = tableDf.merge(avgMcpDf[['avg_mcp_data', 'time_stamp']], on = 'time_stamp')
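    # approximate daily traded energy in MU by sustaining the day's average
    # cleared volume (MW) over 24 hours (key name kept as 'rtm_energy')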
    tableDf['rtm_energy'] = tableDf['avg_mcv_data']*24/1000


    iexGdamTableList: List[IiexGdamRecord] = []

    for i in tableDf.index:
        iexGdamDailyRecord: IiexGdamRecord = {
            'date_time': dt.datetime.strftime(tableDf['time_stamp'][i], '%d-%m-%Y'),
            'min_mcv': round(tableDf['min_mcv_data'][i]),
            'max_mcv': round(tableDf['max_mcv_data'][i]),
            'avg_mcv': round(tableDf['avg_mcv_data'][i]),
            'min_mcp': round(tableDf['min_mcp_data'][i], 2),
            'max_mcp': round(tableDf['max_mcp_data'][i], 2),
            'avg_mcp': round(tableDf['avg_mcp_data'][i], 2),
            'rtm_energy': round(tableDf['rtm_energy'][i], 2)
        }
        iexGdamTableList.append(iexGdamDailyRecord)
    reportDt = dt.datetime(endDt.year, endDt.month, endDt.day)
    reportDt = dt.datetime.strftime(reportDt, '%Y-%m-%d')
    secData: ISection_1_4 = {
        'iex_gdam_table': iexGdamTableList
    }
    
    return secData
 def test_getRawFreq(self) -> None:
     """tests the function that gets raw frequency data
     """
     appDbConnStr = self.jsonConf['appDbConnStr']
     mRepo = MetricsDataRepo(appDbConnStr)
     startDt = dt.datetime(2021, 2, 1)
     samples = mRepo.getRawFreq(startDt, startDt)
     self.assertFalse(len(samples) == 0)
 def test_getSoFarHighestAllEntityData(self) -> None:
     """tests the function that gets hourly data of entity metric
     """
     appDbConnStr = self.jsonConf['appDbConnStr']
     mRepo = MetricsDataRepo(appDbConnStr)
     startDt = dt.datetime(2020, 12, 1)
     samples = mRepo.getSoFarHighestAllEntityData(
         "soFarHighestRequirement", startDt)
     self.assertFalse(len(samples) == 0)
 def test_getDailyVoltDataByLevel(self) -> None:
     """tests the function that gets hourly data of entity metric
     """
     appDbConnStr = self.jsonConf['appDbConnStr']
     mRepo = MetricsDataRepo(appDbConnStr)
     startDt = dt.datetime(2020, 1, 1)
     endDt = dt.datetime(2020, 1, 10)
     samples = mRepo.getDailyVoltDataByLevel(
         765, "%Time >420 or 800", startDt, endDt)
     self.assertFalse(len(samples) == 0)
 def test_getEntityMetricHourlyData(self) -> None:
     """tests the function that gets hourly data of entity metric
     """
     appDbConnStr = self.jsonConf['appDbConnStr']
     mRepo = MetricsDataRepo(appDbConnStr)
     startDt = dt.datetime(2020, 1, 1)
     endDt = dt.datetime(2020, 1, 10)
     samples = mRepo.getEntityMetricHourlyData(
         "wr", "Demand(MW)", startDt, endDt)
     self.assertFalse(len(samples) == 0)
def fetchSection1_9Context(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> ISection_1_9:
    constituentsInfos = getConstituentsMappings()

    constConfig = {}
    for c in constituentsInfos:
        constConfig[c["entity_tag"]] = c["display_name"]

    dataRecords = pd.DataFrame()
    mRepo = MetricsDataRepo(appDbConnStr)
    # get schedule and drawal MUs values for this month
    allEntityScheduleMuVals = mRepo.getAllEntityMetricMonthlyData(
        'Schedule (MU)', startDt, endDt)
    allEntityDrawalMuVals = mRepo.getAllEntityMetricMonthlyData(
        'Drawl (MU)', startDt, endDt)

    allEntityScheduleMuDf = pd.DataFrame(allEntityScheduleMuVals)
    allEntityDrawalMuDf = pd.DataFrame(allEntityDrawalMuVals)
    allEntityScheduleMuDf = allEntityScheduleMuDf.rename(columns={
        'metric_value': 'Schedule (MU)'})
    allEntityDrawalMuDf = allEntityDrawalMuDf.rename(columns={
        'metric_value': 'Drawl (MU)'})
    tempList = allEntityDrawalMuDf['Drawl (MU)']
    allEntityScheduleMuDf['Drawl (MU)'] = tempList

    allEntityScheduleMuDf['difference'] = round(
                                        (allEntityScheduleMuDf['Drawl (MU)'] -
                                        allEntityScheduleMuDf['Schedule (MU)']), 2)
    # print(allEntityReqMuDf)
    dataRecords = allEntityScheduleMuDf

    newNames = []
    for rIter in range(dataRecords.shape[0]):
        row = dataRecords.iloc[rIter, :]
        if row['entity_tag'] in constConfig:
            newNames.append(constConfig[row['entity_tag']])
        else:
            newNames.append(np.nan)
    dataRecords['entity_tag'] = newNames

    scheduleDrawalList: List[IScheduleDrawalDetails] = []

    for i in dataRecords.index:
        scheduleDrawal: IScheduleDrawalDetails = {
            'entity': dataRecords['entity_tag'][i],
            'schedule': round(dataRecords['Schedule (MU)'][i], 2),
            'drawal': round(dataRecords['Drawl (MU)'][i], 2),
            'difference': round(dataRecords['difference'][i], 2)
        }
        scheduleDrawalList.append(scheduleDrawal)
    
    sectionData: ISection_1_9 = {
        "schedule_drawal": scheduleDrawalList
    }

    return sectionData
def fetchSection2_1_LoadDurationCurve(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> dict:
    
    mRepo = MetricsDataRepo(appDbConnStr)

    currentMonthMW = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)', startDt, endDt)
    df = pd.DataFrame(currentMonthMW)
    currentMonthMWVals = deriveDurationVals(df['data_value'], 100)

    pastMonthMW = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)', addMonths(startDt, -1), addMonths(endDt, -1))
    df = pd.DataFrame(pastMonthMW)
    pastMonthMWVals = deriveDurationVals(df['data_value'], 100)

    pastYearMW = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)', addMonths(startDt, -12), addMonths(endDt, -12))
    df = pd.DataFrame(pastYearMW)
    pastYearMWVals = deriveDurationVals(df['data_value'], 100)

    pltTitle = 'Load Duration Curve {0}, {1} & {2}'.format(
        startDt.strftime('%b-%y'), addMonths(startDt, -1).strftime('%b-%y'),
        addMonths(startDt, -12).strftime('%b-%y'))

    fig, ax = plt.subplots(figsize=(7.5, 4.5))

    ax.set_title(pltTitle)
    ax.set_ylabel('Demand met (MW)')
    ax.set_xlabel('% of time')

    # ax.xaxis.set_major_locator(mdates.DayLocator(interval=2))
    # ax.xaxis.set_major_formatter(mdates.DateFormatter('%d'))

    
    ax.plot(currentMonthMWVals['perc_exceeded'], currentMonthMWVals['bins'],
            color='red', label=dt.datetime.strftime(startDt, '%b-%y'))
    ax.plot(pastMonthMWVals['perc_exceeded'], pastMonthMWVals['bins'],
            color='blue', label=addMonths(startDt, -1).strftime('%b-%y'))
    ax.plot(pastYearMWVals['perc_exceeded'], pastYearMWVals['bins'],
            color='green', label=addMonths(startDt, -12).strftime('%b-%y'))


    ax.yaxis.grid(True)
    ax.xaxis.grid(True)
    ax.legend(loc='best', ncol=4, borderaxespad=0.)


    plt.xticks(np.arange(0, 110, 10))
    ax.set_xlim(xmin=0, xmax=100)
    ax.set_ylim(ymin=35000, ymax=65000)
    fig.subplots_adjust(bottom=0.25, top=0.8)
    
    ax.set_facecolor("#cbffff")

    fig.savefig('assets/section_2_1_loadDurationCurve.png')
    # plt.show()
    # plt.close()

    secData: dict = {}
    return secData
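# The deriveDurationVals helper used by the duration curve sections is not
# shown here; the following is a minimal sketch of what it presumably does
# (an assumption based on how its output is plotted above), reusing the same
# pd/np imports as the rest of this module: bin the samples and report, for
# each bin edge, the percentage of samples at or above that edge.
def deriveDurationVals(vals: pd.Series, binSize: float) -> pd.DataFrame:
    binEdges = np.arange(vals.min(), vals.max() + binSize, binSize)
    percExceeded = [100 * (vals >= edge).sum() / len(vals) for edge in binEdges]
    return pd.DataFrame({'bins': binEdges, 'perc_exceeded': percExceeded})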
def fetchSection1_1_2Context(appDbConnStr: str, startDt: dt.datetime,
                             endDt: dt.datetime) -> ISection_1_1_2:

    mRepo = MetricsDataRepo(appDbConnStr)
    # get WR Unrestricted demand hourly values for this month and prev yr month
    wrDemVals = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)', startDt,
                                                endDt)

    wrPeakDemDf = pd.DataFrame(wrDemVals)
    wrPeakDemDf = wrPeakDemDf.pivot(index='time_stamp',
                                    columns='metric_name',
                                    values='data_value')

    lastYrStartDt = addMonths(startDt, -12)
    lastYrEndDt = addMonths(endDt, -12)
    # last_yr_month_name = dt.datetime.strftime(lastYrStartDt, "%b %y")

    wrLastYrDemVals = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)',
                                                      lastYrStartDt,
                                                      lastYrEndDt)

    wrLastYrPeakDemDf = pd.DataFrame(wrLastYrDemVals)
    wrLastYrPeakDemDf = wrLastYrPeakDemDf.pivot(index='time_stamp',
                                                columns='metric_name',
                                                values='data_value')

    wr_peak_dem = round(wrPeakDemDf['Demand(MW)'].max())
    maxPeakDemDt = wrPeakDemDf['Demand(MW)'].idxmax()
    wr_peak_dem_time_str = "{0} Hrs on {1}".format(
        dt.datetime.strftime(maxPeakDemDt, "%H:%M"),
        dt.datetime.strftime(maxPeakDemDt, "%d-%b-%y"))

    wr_peak_dem_last_yr = round(wrLastYrPeakDemDf['Demand(MW)'].max())

    wr_peak_dem_perc_inc = 100 * \
        (wr_peak_dem - wr_peak_dem_last_yr)/wr_peak_dem_last_yr
    wr_peak_dem_perc_inc = round(wr_peak_dem_perc_inc, 2)

    wr_avg_dem = round(wrPeakDemDf['Demand(MW)'].mean())
    wr_avg_dem_last_yr = round(wrLastYrPeakDemDf['Demand(MW)'].mean())
    wr_avg_dem_perc_inc = round(
        100 * (wr_avg_dem - wr_avg_dem_last_yr) / wr_avg_dem_last_yr, 2)
    secData: ISection_1_1_2 = {
        'wr_peak_dem_met': wr_peak_dem,
        'wr_peak_dem_time_str': wr_peak_dem_time_str,
        'wr_peak_dem_perc_inc': wr_peak_dem_perc_inc,
        'wr_last_year_peak_dem': wr_peak_dem_last_yr,
        'wr_avg_dem': wr_avg_dem,
        'wr_avg_dem_last_yr': wr_avg_dem_last_yr,
        'wr_avg_dem_perc_inc': wr_avg_dem_perc_inc
    }
    return secData
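# addMonths is used throughout for month-over-month and year-over-year
# comparisons but is defined elsewhere; a hedged sketch of the assumed
# behaviour: shift a datetime by whole calendar months, clamping the day to
# the length of the target month.
import calendar

def addMonths(d: dt.datetime, months: int) -> dt.datetime:
    totalMonths = (d.year * 12 + d.month - 1) + months
    year, monthIdx = divmod(totalMonths, 12)
    day = min(d.day, calendar.monthrange(year, monthIdx + 1)[1])
    return dt.datetime(year, monthIdx + 1, day, d.hour, d.minute, d.second)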
def fetchSection2_1FrequencyDurationCurve(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> dict:

    mRepo = MetricsDataRepo(appDbConnStr)

    frequencyData = mRepo.getRawFreq(startDt, endDt)
    df = pd.DataFrame(frequencyData)
    frequencyDataVals = deriveDurationVals(df['frequency'], 0.01)

    maxFreq = round(df['frequency'].max(), 2)
    minFreq = round(df['frequency'].min(), 2)
    meanFreq = round(df['frequency'].mean(), 2)

    pltTitle = 'Frequency Duration Curve for {0} Max={1}, Min={2}, Avg={3}'.format(
        startDt.strftime('%b-%y'), maxFreq, minFreq, meanFreq)

    fig, ax = plt.subplots(figsize=(7.5, 4.5))

    ax.set_title(pltTitle)
    ax.set_ylabel('Freq (HZ)')
    ax.set_xlabel('% of time')

    # ax.xaxis.set_major_locator(mdates.DayLocator(interval=2))
    # ax.xaxis.set_major_formatter(mdates.DateFormatter('%d'))

    
    ax.plot(frequencyDataVals['perc_exceeded'], frequencyDataVals['bins'], color='orange')
    

    ax.yaxis.grid(True)
    ax.xaxis.grid(True)
    ax.legend(loc='best', ncol=4, borderaxespad=0.)


    plt.xticks(np.arange(0, 110, 10))
    ax.set_xlim(xmin=0, xmax=100)
    ax.set_ylim(ymin=49.6, ymax=50.4)
    fig.subplots_adjust(bottom=0.25, top=0.8)

    ax.set_facecolor("#ffffcc")
    fig.patch.set_facecolor('#cbcbcb')

    fig.savefig('assets/section_2_2_frequencyDurationCurve.png')
    # plt.show()
    # plt.close()

    secData: dict = {}
    return secData
def fetchSection1_13Context(appDbConnStr:str, filePath: str, startDt: dt.datetime, endDt: dt.datetime):
   
    mRepo = MetricsDataRepo(appDbConnStr)
   
    allRecords = mRepo.getRRASData(filePath, startDt, endDt)
    
    upRecords = allRecords['up']
    downRecords = allRecords['down']


    sectionData = {}

    sectionData['rrasUp'] = CreateListUP(upRecords)
    sectionData['rrasDown'] = CreateListDown(downRecords)

    
    return sectionData
def fetchSection1_1_4Context(appDbConnStr: str, startDt: dt.datetime,
                             endDt: dt.datetime) -> ISection_1_1_4:
    monthDtObj = dt.datetime(startDt.year, startDt.month, 1)
    month_name = dt.datetime.strftime(startDt, "%b %y")
    mRepo = MetricsDataRepo(appDbConnStr)
    # get WR requirement (MU) daily values for this month and prev yr month
    wrConMuVals = mRepo.getEntityMetricDailyData('wr', 'Requirement (MU)',
                                                 startDt, endDt)
    wrConMuDf = pd.DataFrame(wrConMuVals)
    wrConMuDf = wrConMuDf.pivot(index='time_stamp',
                                columns='metric_name',
                                values='data_value')

    lastYrStartDt = addMonths(startDt, -12)
    lastYrEndDt = addMonths(endDt, -12)
    last_yr_month_name = dt.datetime.strftime(lastYrStartDt, "%b %y")
    wrLastYrConMuVals = mRepo.getEntityMetricDailyData('wr',
                                                       'Requirement (MU)',
                                                       lastYrStartDt,
                                                       lastYrEndDt)
    wrLastYrConMuDf = pd.DataFrame(wrLastYrConMuVals)
    wrLastYrConMuDf = wrLastYrConMuDf.pivot(index='time_stamp',
                                            columns='metric_name',
                                            values='data_value')

    wr_tot_req_mu = round(wrConMuDf['Requirement (MU)'].sum())
    wr_avg_req_mu = round(wrConMuDf['Requirement (MU)'].mean())
    wr_max_req_mu = round(wrConMuDf['Requirement (MU)'].max())
    wrMaxConsMuDate = wrConMuDf['Requirement (MU)'].idxmax()
    wr_avg_req_mu_last_yr = round(wrLastYrConMuDf['Requirement (MU)'].mean())

    wr_max_req_mu_date = "{0}".format(
        dt.datetime.strftime(wrMaxConsMuDate, "%d-%b-%y"))

    wr_avg_req_mu_perc_inc = round(
        100 * (wr_avg_req_mu - wr_avg_req_mu_last_yr) / wr_avg_req_mu_last_yr,
        2)
    secData: ISection_1_1_4 = {
        'wr_tot_req_mu': wr_tot_req_mu,
        'wr_avg_req_mu': wr_avg_req_mu,
        'wr_max_req_mu': wr_max_req_mu,
        'wr_max_req_mu_date': wr_max_req_mu_date,
        'wr_avg_req_mu_perc_inc': wr_avg_req_mu_perc_inc,
        'wr_avg_req_mu_last_yr': wr_avg_req_mu_last_yr
    }
    return secData
def fetchSection1_1_hydroContext(appDbConnStr: str, startDt: dt.datetime,
                                 endDt: dt.datetime) -> ISection_1_1_hydro:
    mRepo = MetricsDataRepo(appDbConnStr)
    # get hydro mu
    hydroMuVals = mRepo.getEntityMetricDailyData("wr", "Hydro(MU)", startDt,
                                                 endDt)
    cgsHydroMuVals = mRepo.getEntityMetricDailyData("wr", "CGS Hydro(Mus)",
                                                    startDt, endDt)

    # track the total and the day with the maximum combined hydro generation
    # (state hydro + CGS hydro)
    maxHydroMuObj = {"data_value": 0, "time_stamp": hydroMuVals[0]["time_stamp"]}
    tot_month_hydro_gen_mu = 0
    for h, c in zip(hydroMuVals, cgsHydroMuVals):
        iterationSum = h["data_value"] + c["data_value"]
        tot_month_hydro_gen_mu += iterationSum
        if iterationSum > maxHydroMuObj["data_value"]:
            maxHydroMuObj["data_value"] = iterationSum
            maxHydroMuObj["time_stamp"] = h["time_stamp"]
    avg_month_hydro_gen_mu = tot_month_hydro_gen_mu / len(hydroMuVals)
    max_month_hydro_gen_mu = round(maxHydroMuObj["data_value"], 2)
    max_month_hydro_gen_mu_date = dt.datetime.strftime(
        maxHydroMuObj["time_stamp"], "%d-%b-%Y")

    # get hydro mu for last year
    hydroMuLastYrVals = mRepo.getEntityMetricDailyData("wr", "Hydro(MU)",
                                                       addMonths(startDt, -12),
                                                       addMonths(endDt, -12))
    cgsHydroMuLastYrVals = mRepo.getEntityMetricDailyData(
        "wr", "CGS Hydro(Mus)", addMonths(startDt, -12), addMonths(endDt, -12))
    tot_last_year_hydro_gen_mu = 0
    for h, c in zip(hydroMuLastYrVals, cgsHydroMuLastYrVals):
        tot_last_year_hydro_gen_mu += h["data_value"] + c["data_value"]

    tot_last_year_hydro_gen_mu_perc = round(
        100 * (tot_month_hydro_gen_mu - tot_last_year_hydro_gen_mu) /
        tot_last_year_hydro_gen_mu, 2)

    tot_last_year_hydro_gen_mu_perc_str = ""
    if tot_last_year_hydro_gen_mu_perc < 0:
        tot_last_year_hydro_gen_mu_perc_str = "reduced by {0}%".format(
            -1 * tot_last_year_hydro_gen_mu_perc)
    else:
        tot_last_year_hydro_gen_mu_perc_str = "increased by {0}%".format(
            tot_last_year_hydro_gen_mu_perc)

    secData: ISection_1_1_hydro = {
        "tot_month_hydro_gen_mu": round(tot_month_hydro_gen_mu),
        "tot_last_year_hydro_gen_mu": round(tot_last_year_hydro_gen_mu),
        "tot_last_year_hydro_gen_mu_perc_str":
        tot_last_year_hydro_gen_mu_perc_str,
        "avg_month_hydro_gen_mu": round(avg_month_hydro_gen_mu),
        "max_month_hydro_gen_mu": round(max_month_hydro_gen_mu),
        "max_month_hydro_gen_mu_date": max_month_hydro_gen_mu_date
    }
    return secData
def fetchReservoirMonthlyTableContext(
        appDbConnStr: str, startDt: dt.datetime,
        endDt: dt.datetime) -> ISection_reservoir_table:
    # reservoirInfo = getReservoirsMappings()

    mRepo = MetricsDataRepo(appDbConnStr)
    finYrStartDt = getFinYrDt(startDt)
    reservoirMonthlyVals = mRepo.getReservoirMonthlyData(finYrStartDt, endDt)
    reservoirMonthlyDf = pd.DataFrame(reservoirMonthlyVals)
    reservoirMonthlyDf['day'] = 1
    # loadDurationCurve = fetchSection2_1_LoadDurationCurve(appDbConnStr ,startDt, endDt)
    reservoirMonthlyDf['Date'] = pd.to_datetime(
        reservoirMonthlyDf[['year', 'month', 'day']])
    # reservoirMonthlyDf['Date'] = reservoirMonthlyDf['Date'].dt.strftime('%d-%m-%y')
    reservoirMonthlyDf = reservoirMonthlyDf.drop(['year', 'month', 'day'],
                                                 axis=1)
    reservoirMonthlyDf = reservoirMonthlyDf.pivot(index='Date',
                                                  columns='entity_tag',
                                                  values='level_max')
    reservoirMonthlyDf = reservoirMonthlyDf.rename(
        columns={
            'Gandhi Sagar Reservoir Level ': 'a_gandhi_sagar',
            'Indira sagar Reservoir Level': 'a_indira_sagar',
            'Omkareshwar Reservoir Level': 'a_omkareshwar',
            'Kadana Reservoir Level': 'b_kadana',
            'ssp': 'b_ssp',
            'Ukai Reservoir Level': 'b_ukai',
            'Koyna Reservoir Level ': 'c_koyna'
        })

    reservoirMonthlyDf = reservoirMonthlyDf.reindex(sorted(
        reservoirMonthlyDf.columns),
                                                    axis=1)
    reservoirMonthlyDf.reset_index(inplace=True)
    reservoirMonthlyDf['Date'] = reservoirMonthlyDf['Date'].dt.strftime(
        '%b %y')
    # reservoirMonthlyDf = reservoirMonthlyDf.drop(['Date'], axis=1)
    # print("testing")

    reservoirTableList: List[IReservoirMonthlyDataRecord] = []

    for i in reservoirMonthlyDf.index:
        reservoirTableRecord: IReservoirMonthlyDataRecord = {
            # 'date_time': dt.datetime.strftime(reservoirMonthlyDf['Date'][i], '%d-%m-%Y'),
            'date_time': reservoirMonthlyDf['Date'][i],
            'gandhi': reservoirMonthlyDf['a_gandhi_sagar'][i],
            'indira': round(reservoirMonthlyDf['a_indira_sagar'][i]),
            'omkare': round(reservoirMonthlyDf['a_omkareshwar'][i]),
            'kadana': round(reservoirMonthlyDf['b_kadana'][i]),
            'ssp': round(reservoirMonthlyDf['b_ssp'][i]),
            'ukai': round(reservoirMonthlyDf['b_ukai'][i]),
            'koyna': round(reservoirMonthlyDf['c_koyna'][i])
        }
        reservoirTableList.append(reservoirTableRecord)

    sectionData: ISection_reservoir_table = {
        "reservoir_table": reservoirTableList
    }

    return sectionData
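# getFinYrDt and getPrevFinYrDt are imported from elsewhere; hedged sketches
# of the assumed behaviour, taking the financial year to start on 1 April:
def getFinYrDt(d: dt.datetime) -> dt.datetime:
    finYrStartYear = d.year if d.month >= 4 else d.year - 1
    return dt.datetime(finYrStartYear, 4, 1)


def getPrevFinYrDt(finYrStartDt: dt.datetime) -> dt.datetime:
    return dt.datetime(finYrStartDt.year - 1, 4, 1)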
def fetchSection1_4_2Context(appDbConnStr: str, startDt: dt.datetime,
                             endDt: dt.datetime) -> ISection_1_4_2:
    monthName = dt.datetime.strftime(startDt, "%b %y")
    mRepo = MetricsDataRepo(appDbConnStr)
    # get WR demand hourly values for this month and prev yr month
    wrDemVals = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)', startDt,
                                                endDt)
    demVals = [x['data_value'] for x in wrDemVals]
    wr_max_dem = max(demVals)
    wrMaxDemDt = wrDemVals[demVals.index(wr_max_dem)]['time_stamp']
    wr_max_dem_date_str = dt.datetime.strftime(wrMaxDemDt, "%d-%b-%y")
    wr_avg_dem = sum(demVals) / len(demVals)
    wrMaxDemTimestampStr = dt.datetime.strftime(wrMaxDemDt,
                                                "%d-%b-%y %H:%M") + " hrs"

    lastYrStartDt = addMonths(startDt, -12)
    lastYrEndDt = addMonths(endDt, -12)
    monthNameLastYear = dt.datetime.strftime(lastYrStartDt, "%b %y")
    wrLastYrDemVals = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)',
                                                      lastYrStartDt,
                                                      lastYrEndDt)
    demVals = [x['data_value'] for x in wrLastYrDemVals]
    wr_max_dem_last_year = max(demVals)
    wr_max_dem_date_str_last_year = dt.datetime.strftime(
        wrLastYrDemVals[demVals.index(wr_max_dem_last_year)]['time_stamp'],
        "%d-%b-%y")
    wr_avg_dem_last_year = sum(demVals) / len(demVals)

    wr_avg_dem_perc_change_last_year = round(
        100 * (wr_avg_dem - wr_avg_dem_last_year) / wr_avg_dem_last_year, 2)
    wr_max_dem_perc_change_last_year = round(
        100 * (wr_max_dem - wr_max_dem_last_year) / wr_max_dem_last_year, 2)

    prevMonthStartDt = addMonths(startDt, -1)
    prevMonthEndDt = addMonths(endDt, -1)
    prev_month_name = dt.datetime.strftime(prevMonthStartDt, "%b %y")
    wrPrevMonthDemVals = mRepo.getEntityMetricHourlyData(
        'wr', 'Demand(MW)', prevMonthStartDt, prevMonthEndDt)
    demVals = [x['data_value'] for x in wrPrevMonthDemVals]
    wr_max_dem_prev_month = max(demVals)
    wr_max_dem_date_str_prev_month = dt.datetime.strftime(
        wrPrevMonthDemVals[demVals.index(wr_max_dem_prev_month)]['time_stamp'],
        "%d-%b-%y")
    wr_avg_dem_prev_month = sum(demVals) / len(demVals)

    wr_avg_dem_perc_change_prev_month = round(
        100 * (wr_avg_dem - wr_avg_dem_prev_month) / wr_avg_dem_prev_month, 2)
    wr_max_dem_perc_change_prev_month = round(
        100 * (wr_max_dem - wr_max_dem_prev_month) / wr_max_dem_prev_month, 2)

    # create plot image for demands of prev yr, prev month, this month
    pltDemObjs = [{
        'Date': convertDtToDayNum(x["time_stamp"]),
        'colName': monthName,
        'val': x["data_value"]
    } for x in wrDemVals]
    pltDemObjsLastYear = [{
        'Date': convertDtToDayNum(x["time_stamp"]),
        'colName': monthNameLastYear,
        'val': x["data_value"]
    } for x in wrLastYrDemVals]
    pltDemObjsPrevMonth = [{
        'Date': convertDtToDayNum(x["time_stamp"]),
        'colName': prev_month_name,
        'val': x["data_value"]
    } for x in wrPrevMonthDemVals]
    pltDataObjs = pltDemObjs + pltDemObjsLastYear + pltDemObjsPrevMonth

    pltDataDf = pd.DataFrame(pltDataObjs)
    pltDataDf = pltDataDf.pivot(index='Date', columns='colName', values='val')
    pltDataDf.reset_index(inplace=True)
    pltDataDf["Date"] = [math.floor(x) for x in pltDataDf["Date"]]
    pltDataDf = pltDataDf.groupby(by="Date").max()
    # save plot data as excel
    pltDataDf.to_excel("assets/plot_1_4_2.xlsx", index=True)

    # derive plot title
    pltTitle = 'Demand met {0}, {1} & {2} \n Max. {3} MW on dt. {4} \n Average Load Growth {5}{6} against last year'.format(
        monthName, prev_month_name, monthNameLastYear,
        format(round(wr_max_dem), ","), wrMaxDemTimestampStr,
        wr_avg_dem_perc_change_last_year, "%")

    # create a plotting area and get the figure, axes handle in return
    fig, ax = plt.subplots(figsize=(7.5, 4.5))
    # set plot title
    ax.set_title(pltTitle)
    # set x and y labels
    ax.set_xlabel('Date')
    ax.set_ylabel('MW')
    ax.set_facecolor("#c6d9f1")
    fig.patch.set_facecolor('#fac090')
    # plot data and get the line artist object in return
    laThisMonth, = ax.plot(pltDataDf.index.values,
                           pltDataDf[monthName].values,
                           color='#ff0000')
    laThisMonth.set_label(monthName)

    laLastYear, = ax.plot(pltDataDf.index.values,
                          pltDataDf[monthNameLastYear].values,
                          color='#00ff00')
    laLastYear.set_label(monthNameLastYear)

    laPrevMonth, = ax.plot(pltDataDf.index.values,
                           pltDataDf[prev_month_name].values,
                           color='#A52A2A')
    laPrevMonth.set_label(prev_month_name)

    ax.set_xlim((1, 31), auto=True)
    # enable y axis grid lines
    ax.yaxis.grid(True)
    # enable legends
    ax.legend(bbox_to_anchor=(0.0, -0.3, 1, 0),
              loc='lower center',
              ncol=3,
              mode="expand",
              borderaxespad=0.)
    fig.subplots_adjust(bottom=0.25, top=0.8)
    fig.savefig('assets/section_1_4_2.png')

    secData: ISection_1_4_2 = {
        'prev_month_name': prev_month_name,
        'wr_max_dem': round(wr_max_dem),
        'wr_max_dem_date_str': wr_max_dem_date_str,
        'wr_avg_dem': round(wr_avg_dem),
        'wr_max_dem_last_year': round(wr_max_dem_last_year),
        'wr_max_dem_date_str_last_year': wr_max_dem_date_str_last_year,
        'wr_avg_dem_last_year': round(wr_avg_dem_last_year),
        'wr_avg_dem_perc_change_last_year': wr_avg_dem_perc_change_last_year,
        'wr_max_dem_perc_change_last_year': wr_max_dem_perc_change_last_year,
        'wr_max_dem_prev_month': round(wr_max_dem_prev_month),
        'wr_max_dem_date_str_prev_month': wr_max_dem_date_str_prev_month,
        'wr_avg_dem_prev_month': round(wr_avg_dem_prev_month),
        'wr_avg_dem_perc_change_prev_month': wr_avg_dem_perc_change_prev_month,
        'wr_max_dem_perc_change_prev_month': wr_max_dem_perc_change_prev_month
    }
    return secData
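# convertDtToDayNum is defined elsewhere; a hedged sketch of the assumed
# behaviour (the plot above floors its result back to an integer day of
# month): express a timestamp as a fractional day-of-month number.
def convertDtToDayNum(ts: dt.datetime) -> float:
    return ts.day + (ts.hour + ts.minute / 60 + ts.second / 3600) / 24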
def fetchSection1_7_2Context(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> ISection_1_7_2:
    mRepo = MetricsDataRepo(appDbConnStr)

    # get voltage data for this month
    maxVoltData = mRepo.getDailyVoltDataByLevel(400, "Max", startDt, endDt)
    maxVoltDf = pd.DataFrame(maxVoltData)
    maxVoltDf["data_val"] = pd.to_numeric(
        maxVoltDf["data_val"], errors='coerce')
    maxVoltSeries = maxVoltDf.groupby("entity_name").apply(getMax)
    maxVoltSeries = maxVoltSeries.round()
    maxVoltSeries = maxVoltSeries.rename("max_vol")

    minVoltData = mRepo.getDailyVoltDataByLevel(400, "Min", startDt, endDt)
    minVoltDf = pd.DataFrame(minVoltData)
    minVoltDf["data_val"] = pd.to_numeric(
        minVoltDf["data_val"], errors='coerce')
    minVoltSeries = minVoltDf.groupby("entity_name").apply(getMin)
    minVoltSeries = minVoltSeries.round()
    minVoltSeries = minVoltSeries.rename("min_vol")

    lessVoltPercData = mRepo.getDailyVoltDataByLevel(
        400, "%Time <380 or 728", startDt, endDt)
    lessVoltPercDf = pd.DataFrame(lessVoltPercData)
    lessVoltPercDf["data_val"] = pd.to_numeric(
        lessVoltPercDf["data_val"], errors='coerce')
    lessVoltPercSeries = lessVoltPercDf.groupby("entity_name").apply(getMean)
    lessVoltPercSeries = lessVoltPercSeries.round(2)
    lessVoltPercSeries = lessVoltPercSeries.rename("less_perc")

    bandVoltPercData = mRepo.getDailyVoltDataByLevel(
        400, "%Time within IEGC Band", startDt, endDt)
    bandVoltPercDf = pd.DataFrame(bandVoltPercData)
    bandVoltPercDf["data_val"] = pd.to_numeric(
        bandVoltPercDf["data_val"], errors='coerce')
    bandVoltPercSeries = bandVoltPercDf.groupby("entity_name").apply(getMean)
    bandVoltPercSeries = bandVoltPercSeries.round(2)
    bandVoltPercSeries = bandVoltPercSeries.rename("band_perc")

    moreVoltPercData = mRepo.getDailyVoltDataByLevel(
        400, "%Time >420 or 800", startDt, endDt)
    moreVoltPercDf = pd.DataFrame(moreVoltPercData)
    moreVoltPercDf["data_val"] = pd.to_numeric(
        moreVoltPercDf["data_val"], errors='coerce')
    moreVoltPercSeries = moreVoltPercDf.groupby("entity_name").apply(getMean)
    moreVoltPercSeries = moreVoltPercSeries.round(2)
    moreVoltPercSeries = moreVoltPercSeries.rename("more_perc")

    numMonthHrs = (endDt - startDt).total_seconds()/(60*60)
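    # convert the percentage metrics into hours of the report window and
    # express VDI as the fraction of the window spent outside the band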

    secDf = pd.concat([maxVoltSeries, minVoltSeries, lessVoltPercSeries,
                       bandVoltPercSeries, moreVoltPercSeries], axis=1)
    secDf['less_hrs'] = secDf['less_perc']*(numMonthHrs*0.01)
    secDf['more_hrs'] = secDf['more_perc']*(numMonthHrs*0.01)
    secDf['out_hrs'] = secDf['less_hrs'] + secDf['more_hrs']
    secDf['vdi'] = secDf['out_hrs']*(1/numMonthHrs)
    secDf['less_hrs'] = secDf['less_hrs'].apply(hrsToDurationStr)
    secDf['more_hrs'] = secDf['more_hrs'].apply(hrsToDurationStr)
    secDf['out_hrs'] = secDf['out_hrs'].apply(hrsToDurationStr)
    secDf['vdi'] = secDf['vdi'].apply(hrsToDurationStr)
    secDf.reset_index(inplace=True)
    secDf['entity_name'] = secDf['entity_name'].apply(strip400)
    secDf.rename(columns={"entity_name": "station"}, inplace=True)
    secDataRows = secDf.to_dict('records')

    sectionData: ISection_1_7_2 = {
        'voltVdiProfile400': secDataRows
    }

    return sectionData
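# The voltage profile section above relies on a few small helpers defined
# elsewhere. The sketches below are assumptions about their behaviour, not
# the actual implementations: getMax/getMin/getMean reduce each station's
# 'data_val' column, hrsToDurationStr renders fractional hours as "HH:MM",
# and strip400 trims the voltage level text out of a station name.
def getMax(group: pd.DataFrame) -> float:
    return group['data_val'].max()


def getMin(group: pd.DataFrame) -> float:
    return group['data_val'].min()


def getMean(group: pd.DataFrame) -> float:
    return group['data_val'].mean()


def hrsToDurationStr(hrs: float) -> str:
    totalMinutes = int(round(hrs * 60))
    return '{0:02d}:{1:02d}'.format(totalMinutes // 60, totalMinutes % 60)


def strip400(entityName: str) -> str:
    return entityName.replace('400', '').strip(' -_')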
def fetchSection1_11_windPLF(appDbConnStr: str, startDt: dt.datetime,
                             endDt: dt.datetime) -> ISection_1_11_PLFCUF:
    constituentsInfos = getREConstituentsMappings()
    mRepo = MetricsDataRepo(appDbConnStr)

    numOfDays = (endDt - startDt).days + 1

    soFarHighestAllEntityGenVals = mRepo.getSoFarHighestAllEntityData(
        'soFarHighestWindGen', addMonths(startDt, -1))
    soFarHighestGenLookUp = {}
    for v in soFarHighestAllEntityGenVals:
        soFarHighestGenLookUp[v['constituent']] = {
            'value': v['data_value'],
            'ts': v['data_time']
        }

    dispRows: List[IPLFCUFDataRow] = []
    for cIter in range(len(constituentsInfos)):
        constInfo = constituentsInfos[cIter]

        if math.isnan(constInfo['windCapacity']):
            continue

        maxGenData = mRepo.getEntityMetricHourlyData(constInfo["entity_tag"],
                                                     "Wind(MW)", startDt,
                                                     endDt)

        if constInfo['entity_tag'] == 'central':
            windEnerConsumption = mRepo.getEntityMetricDailyData(
                'wr', 'CGS Wind(Mus)', startDt, endDt)
        elif constInfo['entity_tag'] == 'wr':
            windEnerConsumption = mRepo.getEntityMetricDailyData(
                'wr', "Wind(MU)", startDt, endDt)
            cgsWindEnerConsumption = mRepo.getEntityMetricDailyData(
                'wr', 'CGS Wind(Mus)', startDt, endDt)
            for w, c in zip(windEnerConsumption, cgsWindEnerConsumption):
                w["data_value"] += c["data_value"]
        else:
            windEnerConsumption = mRepo.getEntityMetricDailyData(
                constInfo['entity_tag'], 'Wind(MU)', startDt, endDt)

        energyConsumption = mRepo.getEntityMetricDailyData(
            constInfo['entity_tag'], 'Consumption(MU)', startDt, endDt)

        maxGenDf = pd.DataFrame(maxGenData)
        maxGenDf = maxGenDf.pivot(index='time_stamp',
                                  columns='metric_name',
                                  values='data_value')
        windEnerConsumptionSum = pd.DataFrame(windEnerConsumption).groupby(
            'entity_tag').sum().iloc[0]['data_value']

        if (len(energyConsumption) == 0):
            # This is for the central sector as we don't have consumption data
            # calculate mu from mw
            # df = pd.DataFrame(maxGenData)
            # average = df.groupby('entity_tag').mean()
            # windEnerConsumptionSumDf = average * 0.024 * numOfDays #To Calculate Avg MU from MW
            # windEnerConsumptionSum = windEnerConsumptionSumDf.iloc[0]['data_value']
            EnerConsumptionSum = 0
            penetrationLevel = 0
        else:
            EnerConsumptionSum = pd.DataFrame(energyConsumption).groupby(
                'entity_tag').sum().iloc[0]['data_value']
            penetrationLevel = round(
                (windEnerConsumptionSum / EnerConsumptionSum) * 100, 2)

        maxWind = maxGenDf["Wind(MW)"].max()
        maxWindDt = maxGenDf["Wind(MW)"].idxmax()
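        # PLF here is energy based: MU converted to MWh (x1000) over installed
        # capacity x 24 x days; CUF here is peak MW as a percentage of capacity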

        plf = (windEnerConsumptionSum *
               1000) / (int(constInfo['windCapacity']) * 24 * numOfDays)
        cuf = maxWind * 100 / (int(constInfo['windCapacity']))

        prevHighestWindObj = soFarHighestGenLookUp[constInfo["entity_tag"]]
        newHighestWind = maxWind
        newHighestWindTime = maxWindDt.to_pydatetime()

        if newHighestWind < prevHighestWindObj["value"]:
            newHighestWind = prevHighestWindObj["value"]
            newHighestWindTime = prevHighestWindObj["ts"]

        mRepo.insertSoFarHighest(constInfo['entity_tag'],
                                 "soFarHighestWindGen", startDt,
                                 newHighestWind, newHighestWindTime)
        # soFarHighestAllEntityGenVals = mRepo.getSoFarHighestAllEntityData(
        # 'soFarHighestWindGen', startDt)
        # soFarHighestGenLookUp = {}
        # for v in soFarHighestAllEntityGenVals:
        #     soFarHighestGenLookUp[v['constituent']] = {
        #     'value': v['data_value'], 'ts': v['data_time']}
        so_far_high_gen_str = str(
            round(newHighestWind)) + ' on ' + dt.datetime.strftime(
                newHighestWindTime,
                '%d-%b-%Y') + ' at ' + dt.datetime.strftime(
                    newHighestWindTime, '%H:%M')

        const_display_row: IPLFCUFDataRow = {
            'entity': constInfo['display_name'],
            'capacityMW': round(constInfo['windCapacity']),
            'maxgenerationMW': round(maxWind),
            'soFarHighestGenMW': so_far_high_gen_str,
            'energyGeneration': round(windEnerConsumptionSum),
            'energyConsumption': round(EnerConsumptionSum),
            'penetration': penetrationLevel,
            'plf': round(plf * 100),
            'cuf': round(cuf)
        }
        dispRows.append(const_display_row)

    secData: ISection_1_11_PLFCUF = {"so_far_hig_win_gen_plf": dispRows}
    return secData
def fetchSection1_1_freq_Context(appDbConnStr: str, startDt: dt.datetime,
                                 endDt: dt.datetime) -> ISection_1_1_freq:
    monthDtObj = dt.datetime(startDt.year, startDt.month, 1)
    mRepo = MetricsDataRepo(appDbConnStr)
    # get daily frequency profile metrics for this month
    wrFreqBetBandVals = mRepo.getFreqDailyData('>= 49.9 - <= 50.05', startDt,
                                               endDt)
    wrFreqBetBandDf = pd.DataFrame(wrFreqBetBandVals)
    wrFreqBetBandDf.set_index('time_stamp', inplace=True)
    wrFreqBetBandDf['data_value'] = pd.to_numeric(
        wrFreqBetBandDf['data_value'])
    bet_band = round(wrFreqBetBandDf['data_value'].mean(), 3)

    wrAvgFreqVals = mRepo.getFreqDailyData('avg frq', startDt, endDt)
    wrAvgFreqDf = pd.DataFrame(wrAvgFreqVals)
    wrAvgFreqDf.set_index('time_stamp', inplace=True)
    wrAvgFreqDf['data_value'] = pd.to_numeric(wrAvgFreqDf['data_value'])
    avg_freq = round(wrAvgFreqDf['data_value'].mean(), 3)

    wrFdiVals = mRepo.getFreqDailyData('Hrs Out of IEGC', startDt, endDt)
    wrFdiVals = pd.DataFrame(wrFdiVals)
    wrFdiVals['data_value'] = pd.to_numeric(wrFdiVals['data_value'])
    wrFdiVals['data_value'] = wrFdiVals['data_value'] / 24
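    # FDI: average fraction of the day spent outside the IEGC frequency band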
    fdi = round(wrFdiVals['data_value'].mean(), 2)
    # print(fdi)

    wrMaxFreqVals = mRepo.getFreqDailyData('max inst f', startDt, endDt)
    wrMaxFreqDf = pd.DataFrame(wrMaxFreqVals)
    wrMaxFreqDf.set_index('time_stamp', inplace=True)
    wrMaxFreqDf['data_value'] = pd.to_numeric(wrMaxFreqDf['data_value'])
    wrMaxFreqDate = wrMaxFreqDf['data_value'].idxmax()
    max_freq = round(wrMaxFreqDf['data_value'].max(), 2)

    wrMaxFreqTimeVals = mRepo.getFreqDailyData('time max f', startDt, endDt)
    wrMaxFreqTimeDf = pd.DataFrame(wrMaxFreqTimeVals)
    wrMaxFreqTimeDf.set_index('time_stamp', inplace=True)
    # wrMaxFreqDate = dt.datetime.strftime(wrMaxFreqDate, "%Y-%m-%d %H:%M:%S")
    wrMaxFreqTime = wrMaxFreqTimeDf.loc[wrMaxFreqDate]['data_value']

    max_freq_time_str = "{0} at {1}".format(
        dt.datetime.strftime(wrMaxFreqDate, "%d-%b-%y"), wrMaxFreqTime)

    wrMinFreqVals = mRepo.getFreqDailyData('min inst f', startDt, endDt)
    wrMinFreqDf = pd.DataFrame(wrMinFreqVals)
    wrMinFreqDf.set_index('time_stamp', inplace=True)
    wrMinFreqDf['data_value'] = pd.to_numeric(wrMinFreqDf['data_value'])
    wrMinFreqDate = wrMinFreqDf['data_value'].idxmin()
    min_freq = round(wrMinFreqDf['data_value'].min(), 3)

    wrMinFreqTimeVals = mRepo.getFreqDailyData('time min f', startDt, endDt)
    wrMinFreqTimeDf = pd.DataFrame(wrMinFreqTimeVals)
    wrMinFreqTimeDf.set_index('time_stamp', inplace=True)
    wrMinFreqTime = wrMinFreqTimeDf.loc[wrMinFreqDate]['data_value']

    min_freq_time_str = "{0} at {1}".format(
        dt.datetime.strftime(wrMinFreqDate, "%d-%b-%y"), wrMinFreqTime)

    secData: ISection_1_1_freq = {
        'bet_band': bet_band,
        'avg_freq': avg_freq,
        'fdi': fdi,
        'max_freq': max_freq,
        'max_freq_time_str': max_freq_time_str,
        'min_freq': min_freq,
        'min_freq_time_str': min_freq_time_str
    }
    return secData
def fetchSection1_3_bContext(appDbConnStr: str, startDt: dt.datetime,
                             endDt: dt.datetime) -> ISection_1_3_b:
    constituentsInfos = getConstituentsMappings()
    mRepo = MetricsDataRepo(appDbConnStr)

    soFarHighestAllEntityReqVals = mRepo.getSoFarHighestAllEntityData(
        'soFarHighestRequirement', addMonths(startDt, -1))
    soFarHighestReqLookUp = {}
    for v in soFarHighestAllEntityReqVals:
        soFarHighestReqLookUp[v['constituent']] = {
            'value': v['data_value'],
            'ts': v['data_time']
        }

    soFarHighestAllEntityAvailVals = mRepo.getSoFarHighestAllEntityData(
        'soFarHighestAvailability', addMonths(startDt, -1))
    soFarHighestAvailLookUp = {}
    for v in soFarHighestAllEntityAvailVals:
        soFarHighestAvailLookUp[v['constituent']] = {
            'value': v['data_value'],
            'ts': v['data_time']
        }
    dispRows: List[ISoFarHighestDataRow] = []
    for cIter in range(len(constituentsInfos)):
        constInfo = constituentsInfos[cIter]
        availData = mRepo.getEntityMetricHourlyData(constInfo["entity_tag"],
                                                    "Demand(MW)", startDt,
                                                    endDt)
        loadSheddingData = mRepo.getEntityMetricHourlyData(
            constInfo["entity_tag"], "Load Shedding(MW)", startDt, endDt)
        availReqData = availData + loadSheddingData
        availReqDataDf = pd.DataFrame(availReqData)
        availReqDataDf = availReqDataDf.pivot(index='time_stamp',
                                              columns='metric_name',
                                              values='data_value')
        availReqDataDf.reset_index(inplace=True)
        availReqDataDf["Requirement"] = availReqDataDf["Demand(MW)"] + \
            availReqDataDf["Load Shedding(MW)"]
        maxReq = availReqDataDf["Requirement"].max()
        maxReqDt = availReqDataDf["time_stamp"].loc[
            availReqDataDf["Requirement"].idxmax()]
        maxAvail = availReqDataDf["Demand(MW)"].max()
        maxAvailDt = availReqDataDf["time_stamp"].loc[
            availReqDataDf["Demand(MW)"].idxmax()]
        maxShortagePerc = round(100 * (maxReq - maxAvail) / maxAvail, 2)

        prevHighestReqObj = soFarHighestReqLookUp[constInfo["entity_tag"]]
        newHighestReq = maxReq
        newHighestReqTime = maxReqDt.to_pydatetime()
        if newHighestReq < prevHighestReqObj["value"]:
            newHighestReq = prevHighestReqObj["value"]
            newHighestReqTime = prevHighestReqObj["ts"]

        prevHighestAvailObj = soFarHighestAvailLookUp[constInfo["entity_tag"]]
        newHighestAvail = maxAvail
        newHighestAvailTime = maxAvailDt.to_pydatetime()
        if newHighestAvail < prevHighestAvailObj["value"]:
            newHighestAvail = prevHighestAvailObj["value"]
            newHighestAvailTime = prevHighestAvailObj["ts"]
        newHighestShortagePerc = round(
            100 * (newHighestReq - newHighestAvail) / newHighestAvail, 2)
        mRepo.insertSoFarHighest(constInfo['entity_tag'],
                                 "soFarHighestAvailability", startDt,
                                 newHighestAvail, newHighestAvailTime)
        mRepo.insertSoFarHighest(constInfo['entity_tag'],
                                 "soFarHighestRequirement", startDt,
                                 newHighestReq, newHighestReqTime)

        const_display_row: ISoFarHighestDataRow = {
            'entity': constInfo['display_name'],
            'peakReqMW': round(maxReq),
            'peakAvailMW': round(maxAvail),
            'shortage_X': maxShortagePerc,
            'highestReqMW': round(newHighestReq),
            'highestAvailMW': round(newHighestAvail),
            'shortage_Y': newHighestShortagePerc,
            'highestReqMWDateStr': 'Max on {0} at {1} hrs'.format(
                dt.datetime.strftime(newHighestReqTime, "%d.%m.%y"),
                dt.datetime.strftime(newHighestReqTime, "%H:%M")),
            'highestAvailMWDateStr': 'Max on {0} at {1} hrs'.format(
                dt.datetime.strftime(newHighestAvailTime, "%d.%m.%y"),
                dt.datetime.strftime(newHighestAvailTime, "%H:%M"))
        }
        dispRows.append(const_display_row)

    secData: ISection_1_3_b = {"so_far_hig_req_avail": dispRows}
    return secData
def fetchSection1_3_aContext(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> ISection_1_3_a:
    constituentsInfos = getConstituentsMappings()

    constConfig = {}
    for c in constituentsInfos:
        constConfig[c["entity_tag"]] = c["display_name"]

    dataRecords = pd.DataFrame()
    mRepo = MetricsDataRepo(appDbConnStr)
    prevFinYrStartDt = getFinYrDt(startDt)
    # get all entity requirement and consumption MU values for this month
    allEntityReqMuVals = mRepo.getAllEntityMetricMonthlyData(
        'Requirement (MU)', startDt, endDt)
    allEntityAvailMuVals = mRepo.getAllEntityMetricMonthlyData(
        'Consumption(MU)', startDt, endDt)

    allEntityReqMuDf = pd.DataFrame(allEntityReqMuVals)
    allEntityAvailMuDf = pd.DataFrame(allEntityAvailMuVals)
    allEntityAvailMuDf = allEntityAvailMuDf.rename(columns={
        'metric_value': 'Consumption(MU)'})
    allEntityReqMuDf = allEntityReqMuDf.rename(columns={
        'metric_value': 'Requirement (MU)'})
    tempList = allEntityAvailMuDf['Consumption(MU)']
    allEntityReqMuDf['Consumption(MU)'] = tempList

    allEntityReqMuDf['shortage'] = round(100 *
                                         (allEntityReqMuDf['Requirement (MU)'] -
                                          allEntityReqMuDf['Consumption(MU)']) /
                                         allEntityReqMuDf['Consumption(MU)'], 2)
    # print(allEntityReqMuDf)

    prevYrAllEntityReqMuVals = mRepo.getAllEntityMetricMonthlyData(
        'Requirement (MU)', prevFinYrStartDt, endDt)
    prevYrAllEntityAvailMuVals = mRepo.getAllEntityMetricMonthlyData(
        'Consumption(MU)', prevFinYrStartDt, endDt)

    prevYrAllEntityReqMuDf = pd.DataFrame(prevYrAllEntityReqMuVals)
    prevYrAllEntityAvailMuDf = pd.DataFrame(prevYrAllEntityAvailMuVals)
    prevYrAllEntityAvailMuDf = prevYrAllEntityAvailMuDf.rename(columns={
        'metric_value': 'Consumption(MU)'})
    prevYrAllEntityReqMuDf = prevYrAllEntityReqMuDf.rename(columns={
        'metric_value': 'Requirement (MU)'})
    tempList = prevYrAllEntityAvailMuDf['Consumption(MU)']
    prevYrAllEntityReqMuDf['Consumption(MU)'] = tempList

    prevYrAllEntityReqMuDf['shortage'] = round(100 *
                                               (prevYrAllEntityReqMuDf['Requirement (MU)'] -
                                                prevYrAllEntityAvailMuDf['Consumption(MU)']) /
                                               prevYrAllEntityAvailMuDf['Consumption(MU)'], 2)
    dataRecords = pd.merge(
        allEntityReqMuDf, prevYrAllEntityReqMuDf, on='entity_tag')
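    # after the merge, current period columns carry the _x suffix and the
    # previous financial year columns carry the _y suffix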

    newNames = []
    for rIter in range(dataRecords.shape[0]):
        row = dataRecords.iloc[rIter, :]
        if row['entity_tag'] in constConfig:
            newNames.append(constConfig[row['entity_tag']])
        else:
            newNames.append(np.nan)
    dataRecords['entity_tag'] = newNames

    energyReqAvailList: List[dict] = []

    for i in dataRecords.index:
        energyReq = {
            'entity': dataRecords['entity_tag'][i],
            'reqMu_X': round(dataRecords['Requirement (MU)_x'][i]),
            'availMu_X': round(dataRecords['Consumption(MU)_x'][i]),
            'shortage_X': round(dataRecords['shortage_x'][i], 2),
            'reqMu_Y': round(dataRecords['Requirement (MU)_y'][i]),
            'availMu_Y': round(dataRecords['Consumption(MU)_y'][i]),
            'shortage_Y': round(dataRecords['shortage_y'][i], 2)
        }
        energyReqAvailList.append(energyReq)
    
    prevFinYrDateStr = dt.datetime.strftime(prevFinYrStartDt, "%b %y")
    sectionData: ISection_1_3_a = {
        "energy_req_avail": energyReqAvailList,
        "recent_fin_month_name": prevFinYrDateStr
    }

    return sectionData
def fetchReservoirContext(appDbConnStr: str, startDt: dt.datetime,
                          endDt: dt.datetime) -> IReservoirSection:
    reservoirInfo = getReservoirsMappings()

    numPages = 0
    mRepo = MetricsDataRepo(appDbConnStr)
    prevFinYrStartDt = getFinYrDt(startDt)
    prevFinYear = dt.datetime.strftime(prevFinYrStartDt, "%Y")
    currYear = int(prevFinYear)

    # last financial year details
    lastFinYrStartDt = getPrevFinYrDt(prevFinYrStartDt)
    lastFinYrEndDt = prevFinYrStartDt - dt.timedelta(days=1)
    lastFinYrStartStr = dt.datetime.strftime(lastFinYrStartDt, "%Y")
    lastFinYrEndYear = int(lastFinYrStartStr) + 1

    for itr in reservoirInfo:
        metricList: List = []
        for entity in itr:
            if entity == 'entity_tag':
                continue
            else:
                metricList.append(
                    mRepo.getReservoirDailyData(itr['entity_tag'], itr[entity],
                                                prevFinYrStartDt, endDt))
        if len(metricList[0]) > 0:
            # create plot image for all the metrices
            pltDataObj: List = []
            for temp in range(len(metricList)):
                pltDataObj = pltDataObj + [{
                    'Date': x["time_stamp"],
                    'colName': x["metric_tag"],
                    'val': x["data_value"]
                } for x in metricList[temp]]
            pltDataDf = pd.DataFrame(pltDataObj)
            pltDataDf = pltDataDf.pivot(index='Date',
                                        columns='colName',
                                        values='val')
            pltDataDf.reset_index(inplace=True)

            # derive plot title
            pltTitle = '{0} {1}-{2}'.format(itr['entity_tag'], currYear,
                                            currYear + 1)

            # create a plotting area and get the figure, axes handle in return
            fig, ax = plt.subplots(figsize=(7.5, 4.5))

            # instantiate a second axes that shares the same x-axis
            ax2 = ax.twinx()
            # set plot title
            ax.set_title(pltTitle)
            # set y labels
            ax2.set_ylabel('MUs')
            ax.set_ylabel('Meter')

            # set x axis locator as month
            ax.xaxis.set_major_locator(mdates.MonthLocator())
            # set x axis formatter as month name
            ax.xaxis.set_major_formatter(mdates.DateFormatter('%b'))

            # set x axis locator as month
            ax2.xaxis.set_major_locator(mdates.MonthLocator())
            # set x axis formatter as month name
            ax2.xaxis.set_major_formatter(mdates.DateFormatter('%b'))

            # plot data and get the line artist object in return 'GEN.'

            clr = ['#00ccff', '#ff8533', '#ff0000', '#9900ff']
            for col in range(len(pltDataDf.columns) - 1):
                if pltDataDf.columns[col + 1] == 'GEN.':
                    last, = ax2.plot(pltDataDf['Date'],
                                     pltDataDf[pltDataDf.columns[col + 1]],
                                     color=clr[col],
                                     label=pltDataDf.columns[col + 1])
                else:
                    last, = ax.plot(pltDataDf['Date'],
                                    pltDataDf[pltDataDf.columns[col + 1]],
                                    color=clr[col],
                                    label=pltDataDf.columns[col + 1])
            # enable y axis grid lines
            ax.yaxis.grid(True)

            # Ensure a major tick for each week using (interval=1)
            ax.xaxis.set_major_locator(mdates.MonthLocator(interval=1))

            # enable legends
            ax2.legend()
            ax.legend(bbox_to_anchor=(0.0, -0.3, 1, 0),
                      loc='lower center',
                      ncol=3,
                      mode="expand",
                      borderaxespad=0.)
            fig.subplots_adjust(bottom=0.25, top=0.8)
            fig.savefig('assets/reservoir_section_{0}.png'.format(numPages))
            # fig.savefig('assets/reservoir_section/{0}-{1}.png'.format(itr['entity_tag'], currYear))
            # plt.close()
            numPages += 1

        prevFinYrMetricList: List = []
        for entity in itr:
            if entity == 'entity_tag':
                continue
            else:
                prevFinYrMetricList.append(
                    mRepo.getReservoirDailyData(itr['entity_tag'], itr[entity],
                                                lastFinYrStartDt,
                                                lastFinYrEndDt))
        if len(prevFinYrMetricList[0]) > 0:
            # create plot image for all the metrices
            lastFinYrPltDataObj: List = []
            for temp in range(len(prevFinYrMetricList)):
                lastFinYrPltDataObj = lastFinYrPltDataObj + [
                    {
                        'Date': x["time_stamp"],
                        'colName': x["metric_tag"],
                        'val': x["data_value"]
                    } for x in prevFinYrMetricList[temp]
                ]
            lastFinYrPltDataDf = pd.DataFrame(lastFinYrPltDataObj)
            lastFinYrPltDataDf = lastFinYrPltDataDf.pivot(index='Date',
                                                          columns='colName',
                                                          values='val')
            lastFinYrPltDataDf.reset_index(inplace=True)

            # derive plot title
            pltTitle = '{0} {1}-{2}'.format(itr['entity_tag'],
                                            lastFinYrStartStr, lastFinYrEndYear)

            # create a plotting area and get the figure, axes handle in return
            fig, ax = plt.subplots(figsize=(7.5, 4.5))

            # instantiate a second axes that shares the same x-axis
            ax2 = ax.twinx()
            # set plot title
            ax.set_title(pltTitle)
            # set y labels
            ax2.set_ylabel('MUs')
            ax.set_ylabel('Meter')

            # set x axis locator as month
            ax.xaxis.set_major_locator(mdates.MonthLocator())
            # set x axis formatter as month name
            ax.xaxis.set_major_formatter(mdates.DateFormatter('%b'))

            # set x axis locator as month
            ax2.xaxis.set_major_locator(mdates.MonthLocator())
            # set x axis formatter as month name
            ax2.xaxis.set_major_formatter(mdates.DateFormatter('%b'))

            # plot data and get the line artist object in return 'GEN.'

            clr = ['#00ccff', '#ff8533', '#ff0000', '#9900ff']
            for col in range(len(lastFinYrPltDataDf.columns) - 1):
                if lastFinYrPltDataDf.columns[col + 1] == 'GEN.':
                    last, = ax2.plot(
                        lastFinYrPltDataDf['Date'],
                        lastFinYrPltDataDf[lastFinYrPltDataDf.columns[col +
                                                                      1]],
                        color=clr[col],
                        label=lastFinYrPltDataDf.columns[col + 1])
                else:
                    last, = ax.plot(
                        lastFinYrPltDataDf['Date'],
                        lastFinYrPltDataDf[lastFinYrPltDataDf.columns[col +
                                                                      1]],
                        color=clr[col],
                        label=lastFinYrPltDataDf.columns[col + 1])
            # enable y axis grid lines
            ax.yaxis.grid(True)
            # Ensure a major tick for each week using (interval=1)
            ax.xaxis.set_major_locator(mdates.MonthLocator(interval=1))

            # enable legends
            ax2.legend()
            ax.legend(bbox_to_anchor=(0.0, -0.3, 1, 0),
                      loc='lower center',
                      ncol=3,
                      mode="expand",
                      borderaxespad=0.)
            fig.subplots_adjust(bottom=0.25, top=0.8)
            fig.savefig('assets/reservoir_section_{0}.png'.format(numPages))

            # fig.savefig('assets/reservoir_section/{0}-{1}.png'.format(itr['entity_tag'],
            #                                         lastFinYrEndDt))
            # plt.close()
            numPages += 1

    sectionData: IReservoirSection = {'num_plts_sec_reservoir': numPages}
    return sectionData
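
# --- Hypothetical usage sketch (not part of the original source) ---
# The section fetchers in this module share the signature
# (appDbConnStr, startDt, endDt) and return a dict of template context values,
# so a report builder could merge several of them into a single context.
# The fetcher list below is illustrative, not an exhaustive or official one.
def buildReportContextSketch(appDbConnStr: str, startDt: dt.datetime,
                             endDt: dt.datetime) -> dict:
    reportContext: dict = {}
    for fetcher in (fetchSection1_7Context, fetchSection1_1_WindSolarContext,
                    fetchSection1_1_1Context):
        # each fetcher returns a flat dict, so update() merges it directly
        reportContext.update(fetcher(appDbConnStr, startDt, endDt))
    return reportContext
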
def fetchSection1_7Context(appDbConnStr: str, startDt: dt.datetime,
                           endDt: dt.datetime) -> ISection_1_7:
    metricsInfo = getVoltMetrics()
    mRepo = MetricsDataRepo(appDbConnStr)

    allMetris = {}
    allMetris['400'] = {}
    allMetris['765'] = {}

    allStations = []

    for metricIndex in range(len(metricsInfo)):
        metricInfo = metricsInfo[metricIndex]

        voltLevel = metricInfo['voltageLevel']
        operation = metricInfo['operation']
        metricName = metricInfo['metric_name']

        if operation != 'compute ':
            allEntityMetricData = mRepo.getDailyVoltDataByLevel(
                voltLevel, metricName, startDt, endDt)
            allEntityMetricDf = pd.DataFrame(allEntityMetricData)
            allEntityMetricDf = allEntityMetricDf.groupby("entity_name")
            allStations = allEntityMetricDf.groups.keys()
            combinedObj = {}

            for eachStation in allStations:
                combinedObj[eachStation] = []

            for eachRecord in allEntityMetricData:
                if math.isnan(float(eachRecord['data_val'])):
                    eachRecord['data_val'] = 0
                combinedObj[eachRecord['entity_name']].append(
                    float(eachRecord['data_val']))

            stndWiseOperationData = {}
            for eachStation in allStations:
                stnData = combinedObj[eachStation]
                val = 0
                if operation == 'max':
                    val = max(stnData)

                elif operation == 'sum':
                    val = sum(stnData) / ((endDt - startDt).days + 1)
                    # val = sum(stnData)

                elif operation == 'min':
                    val = min(stnData)

                stndWiseOperationData[eachStation] = val

            allMetris[str(voltLevel)][metricName] = stndWiseOperationData

        else:
            refColumnName = metricInfo['evalColumn']
            multiplyFactor = metricInfo['multiply']
            allMetris[metricName] = {}

            stndWiseOperationData = {}

            for eachStation in allStations:
                val = allMetris[str(voltLevel)][refColumnName][eachStation]

                if multiplyFactor == 'monthHrs':
                    difference = endDt - startDt
                    days = difference.days + 1
                    factor = days * 24
                    stndWiseOperationData[eachStation] = int(val) * factor

                elif multiplyFactor == 'weekHrs':
                    factor = 24 * 7
                    stndWiseOperationData[eachStation] = int(val) / factor

            allMetris[str(voltLevel)][metricName] = stndWiseOperationData

    print(allMetris)

    df400 = pd.DataFrame(allMetris['400'])
    df765 = pd.DataFrame(allMetris['765'])

    # DataFrame.append was removed in pandas 2.0; use pd.concat instead
    df = pd.concat([df765, df400], ignore_index=True)
    allMetricRecords400KV: List[IVoltageRecord] = []
    allMetricRecords765V: List[IVoltageRecord] = []

    allMetricRecords400KV = builtIRecord(df400)
    allMetricRecords765V = builtIRecord(df765)

    secData: ISection_1_7 = {
        'voltageLevel400KV': allMetricRecords400KV,
        'voltageLevel765KV': allMetricRecords765V
    }

    return secData
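
# --- Illustrative sketch (assumption): builtIRecord is defined elsewhere in
# this module and converts a station-indexed metrics DataFrame into a list of
# IVoltageRecord rows. The exact IVoltageRecord fields are not visible here,
# so the hypothetical stand-in below only shows the general shape.
def builtIRecordSketch(metricsDf: pd.DataFrame) -> List[dict]:
    records: List[dict] = []
    for stationName, row in metricsDf.iterrows():
        rec = {'station': stationName}
        # copy each computed metric column (max / min / averaged hours) as-is
        rec.update(row.to_dict())
        records.append(rec)
    return records
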
def fetchSection1_1_WindSolarContext(
        appDbConnStr: str, startDt: dt.datetime,
        endDt: dt.datetime) -> ISection_1_1_wind_solar:
    mRepo = MetricsDataRepo(appDbConnStr)
    # get solar mu
    solarMuVals = mRepo.getEntityMetricDailyData("wr", "Solar(MU)", startDt,
                                                 endDt)
    cgsSolarMuVals = mRepo.getEntityMetricDailyData("wr", "CGS Solar(Mus)",
                                                    startDt, endDt)

    tot_month_solar_gen_mu = 0
    for s, c in zip(solarMuVals, cgsSolarMuVals):
        tot_month_solar_gen_mu += s["data_value"] + c["data_value"]
    avg_month_solar_gen_mu = tot_month_solar_gen_mu / len(solarMuVals)

    # get solar mu for last year
    solarMuLastYrVals = mRepo.getEntityMetricDailyData("wr", "Solar(MU)",
                                                       addMonths(startDt, -12),
                                                       addMonths(endDt, -12))

    cgsSolarMuLastYrVals = mRepo.getEntityMetricDailyData(
        "wr", "CGS Solar(Mus)", addMonths(startDt, -12), addMonths(endDt, -12))

    tot_last_year_solar_gen_mu = 0
    for s, c in zip(solarMuLastYrVals, cgsSolarMuLastYrVals):
        tot_last_year_solar_gen_mu += s["data_value"] + c["data_value"]

    tot_last_year_solar_gen_mu_perc = round(
        100 * (tot_month_solar_gen_mu - tot_last_year_solar_gen_mu) /
        tot_last_year_solar_gen_mu, 2)

    tot_last_year_solar_gen_mu_perc_str = ""
    if tot_last_year_solar_gen_mu_perc < 0:
        tot_last_year_solar_gen_mu_perc_str = "reduced by {0}%".format(
            -1 * tot_last_year_solar_gen_mu_perc)
    else:
        tot_last_year_solar_gen_mu_perc_str = "increased by {0}%".format(
            tot_last_year_solar_gen_mu_perc)

    solarMwVals = mRepo.getEntityMetricHourlyData("wr", "Solar(MW)", startDt,
                                                  endDt)
    maxSolarMwObj = solarMwVals[0]
    for v in solarMwVals:
        if v["data_value"] > maxSolarMwObj["data_value"]:
            maxSolarMwObj = v
    max_month_solar_gen_mw = maxSolarMwObj["data_value"]
    max_month_solar_gen_mw_date = '{0} at {1} hrs'.format(
        dt.datetime.strftime(maxSolarMwObj["time_stamp"], "%d %b %Y"),
        dt.datetime.strftime(maxSolarMwObj["time_stamp"], "%H:%M"))

    # get wind mu
    windMuVals = mRepo.getEntityMetricDailyData("wr", "Wind(MU)", startDt,
                                                endDt)
    cgsWindMuVals = mRepo.getEntityMetricDailyData("wr", "CGS Wind(Mus)",
                                                   startDt, endDt)
    tot_month_wind_gen_mu = 0
    for w, c in zip(windMuVals, cgsWindMuVals):
        tot_month_wind_gen_mu += w["data_value"] + c["data_value"]
    avg_month_wind_gen_mu = tot_month_wind_gen_mu / len(windMuVals)

    # get wind mu for last year
    windMuLastYrVals = mRepo.getEntityMetricDailyData("wr", "Wind(MU)",
                                                      addMonths(startDt, -12),
                                                      addMonths(endDt, -12))
    cgsWindMuLastYrVals = mRepo.getEntityMetricDailyData(
        "wr", "CGS Wind(Mus)", addMonths(startDt, -12), addMonths(endDt, -12))
    tot_last_year_wind_gen_mu = 0
    for w, c in zip(windMuLastYrVals, cgsWindMuLastYrVals):
        tot_last_year_wind_gen_mu += w["data_value"] + c["data_value"]

    tot_last_year_wind_gen_mu_perc = round(
        100 * (tot_month_wind_gen_mu - tot_last_year_wind_gen_mu) /
        tot_last_year_wind_gen_mu, 2)

    tot_last_year_wind_gen_mu_perc_str = ""
    if tot_last_year_wind_gen_mu_perc < 0:
        tot_last_year_wind_gen_mu_perc_str = "reduced by {0}%".format(
            -1 * tot_last_year_wind_gen_mu_perc)
    else:
        tot_last_year_wind_gen_mu_perc_str = "increased by {0}%".format(
            tot_last_year_wind_gen_mu_perc)

    windMwVals = mRepo.getEntityMetricHourlyData("wr", "Wind(MW)", startDt,
                                                 endDt)
    maxWindMwObj = windMwVals[0]
    for v in windMwVals:
        if v["data_value"] > maxWindMwObj["data_value"]:
            maxWindMwObj = v
    max_month_wind_gen_mw = maxWindMwObj["data_value"]
    max_month_wind_gen_mw_date = '{0} at {1} hrs'.format(
        dt.datetime.strftime(maxWindMwObj["time_stamp"], "%d %b %Y"),
        dt.datetime.strftime(maxWindMwObj["time_stamp"], "%H:%M"))

    # create dataframe for solar and wind addition
    resDf = pd.DataFrame(windMwVals + solarMwVals)
    resDf = resDf.pivot(index="time_stamp",
                        columns="metric_name",
                        values="data_value")
    resDf["Renewable"] = resDf["Wind(MW)"] + resDf["Solar(MW)"]
    max_month_ren_gen_mw = resDf["Renewable"].max()
    maxRenDt = resDf["Renewable"].idxmax().to_pydatetime()
    max_month_ren_gen_mw_date = '{0} at {1} hrs'.format(
        dt.datetime.strftime(maxRenDt, "%d %b %Y"),
        dt.datetime.strftime(maxRenDt, "%H:%M"))
    secData: ISection_1_1_wind_solar = {
        'tot_month_wind_gen_mu': round(tot_month_wind_gen_mu),
        'avg_month_wind_gen_mu': round(avg_month_wind_gen_mu, 1),
        'tot_last_year_wind_gen_mu_perc_str':
        tot_last_year_wind_gen_mu_perc_str,
        'tot_last_year_wind_gen_mu': round(tot_last_year_wind_gen_mu),
        'max_month_wind_gen_mw': round(max_month_wind_gen_mw),
        'max_month_wind_gen_mw_date': max_month_wind_gen_mw_date,
        'tot_month_solar_gen_mu': round(tot_month_solar_gen_mu),
        'avg_month_solar_gen_mu': round(avg_month_solar_gen_mu, 1),
        'tot_last_year_solar_gen_mu_perc_str':
        tot_last_year_solar_gen_mu_perc_str,
        'tot_last_year_solar_gen_mu': round(tot_last_year_solar_gen_mu),
        'max_month_solar_gen_mw': round(max_month_solar_gen_mw),
        'max_month_solar_gen_mw_date': max_month_solar_gen_mw_date,
        'max_month_ren_gen_mw': round(max_month_ren_gen_mw),
        'max_month_ren_gen_mw_date': max_month_ren_gen_mw_date
    }
    return secData
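
# --- Refactor sketch (not part of the original source) ---
# The wind and solar blocks above build the same "increased/reduced by x%"
# string from a current total and last year's total. A helper like the one
# below could remove that duplication; it is only a suggestion.
def yoyChangeStr(currentTotal: float, lastYearTotal: float) -> str:
    percChange = round(
        100 * (currentTotal - lastYearTotal) / lastYearTotal, 2)
    if percChange < 0:
        return "reduced by {0}%".format(-1 * percChange)
    return "increased by {0}%".format(percChange)
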
def fetchSection1_1_1Context(appDbConnStr: str, startDt: dt.datetime,
                             endDt: dt.datetime) -> ISection_1_1_1:
    monthDtObj = dt.datetime(startDt.year, startDt.month, 1)
    month_name = dt.datetime.strftime(startDt, "%b' %y")
    full_month_name = dt.datetime.strftime(startDt, "%B %Y")
    mRepo = MetricsDataRepo(appDbConnStr)
    # get WR Unrestricted demand hourly values for this month and prev yr month
    wrDemVals = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)', startDt,
                                                endDt)
    wrLoadSheddingVals = mRepo.getEntityMetricHourlyData(
        'wr', 'Load Shedding(MW)', startDt, endDt)
    wrUnResDemDf = pd.DataFrame(wrDemVals + wrLoadSheddingVals)
    wrUnResDemDf = wrUnResDemDf.pivot(index='time_stamp',
                                      columns='metric_name',
                                      values='data_value')
    wrUnResDemDf['UnresDem'] = wrUnResDemDf['Demand(MW)'] + \
        wrUnResDemDf['Load Shedding(MW)']

    lastYrStartDt = addMonths(startDt, -12)
    lastYrEndDt = addMonths(endDt, -12)
    last_yr_month_name = dt.datetime.strftime(lastYrStartDt, "%b %y")
    wrLastYrDemVals = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)',
                                                      lastYrStartDt,
                                                      lastYrEndDt)
    wrLastYrLoadSheddingVals = mRepo.getEntityMetricHourlyData(
        'wr', 'Load Shedding(MW)', lastYrStartDt, lastYrEndDt)
    wrLastYrUnResDemDf = pd.DataFrame(wrLastYrDemVals +
                                      wrLastYrLoadSheddingVals)
    wrLastYrUnResDemDf = wrLastYrUnResDemDf.pivot(index='time_stamp',
                                                  columns='metric_name',
                                                  values='data_value')
    wrLastYrUnResDemDf['UnresDem'] = wrLastYrUnResDemDf['Demand(MW)'] + \
        wrLastYrUnResDemDf['Load Shedding(MW)']

    wr_max_unres_dem = round(wrUnResDemDf['UnresDem'].max())
    maxUnresDemDt = wrUnResDemDf['UnresDem'].idxmax()
    wr_max_unres_dem_time_str = "{0} Hrs on {1}".format(
        dt.datetime.strftime(maxUnresDemDt, "%H:%M"),
        dt.datetime.strftime(maxUnresDemDt, "%d-%b-%y"))

    wr_max_unres_dem_last_yr = round(wrLastYrUnResDemDf['UnresDem'].max())

    wr_max_unres_dem_perc_inc = 100 * \
        (wr_max_unres_dem - wr_max_unres_dem_last_yr)/wr_max_unres_dem_last_yr
    wr_max_unres_dem_perc_inc = round(wr_max_unres_dem_perc_inc, 2)

    wr_avg_unres_dem = round(wrUnResDemDf['UnresDem'].mean())
    wr_avg_unres_dem_last_yr = round(wrLastYrUnResDemDf['UnresDem'].mean())
    wr_avg_unres_dem_perc_inc = round(
        100 * (wr_avg_unres_dem - wr_avg_unres_dem_last_yr) /
        wr_avg_unres_dem_last_yr, 2)
    secData: ISection_1_1_1 = {
        'monthDtObj': monthDtObj,
        'month_name': month_name,
        'full_month_name': full_month_name,
        'last_yr_month_name': last_yr_month_name,
        'wr_max_unres_dem': wr_max_unres_dem,
        'wr_max_unres_dem_time_str': wr_max_unres_dem_time_str,
        'wr_max_unres_dem_perc_inc': wr_max_unres_dem_perc_inc,
        'wr_max_unres_dem_last_yr': wr_max_unres_dem_last_yr,
        'wr_avg_unres_dem': wr_avg_unres_dem,
        'wr_avg_unres_dem_last_yr': wr_avg_unres_dem_last_yr,
        'wr_avg_unres_dem_perc_inc': wr_avg_unres_dem_perc_inc
    }
    return secData
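
# --- Assumption: addMonths is a project helper imported elsewhere. Based on
# its usage above (shifting a datetime by whole months), a minimal sketch of
# the assumed behaviour, clamping the day to the target month's length, is:
def addMonthsSketch(d: dt.datetime, months: int) -> dt.datetime:
    import calendar
    monthIndex = d.month - 1 + months
    year = d.year + monthIndex // 12
    month = monthIndex % 12 + 1
    day = min(d.day, calendar.monthrange(year, month)[1])
    return dt.datetime(year, month, day, d.hour, d.minute, d.second)
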
def fetchSection2_3_MinContext(appDbConnStr: str, startDt: dt.datetime,
                               endDt: dt.datetime) -> ISection_2_3_2:
    monthDtObj = dt.datetime(startDt.year, startDt.month, 1)
    month_name = dt.datetime.strftime(startDt, "%b' %y")
    full_month_name = dt.datetime.strftime(startDt, "%B %Y")
    constituentinfo = getConstituentsMappings()
    numPages = 0

    mRepo = MetricsDataRepo(appDbConnStr)
    # get WR demand hourly values for this month
    wrDemVals = mRepo.getEntityMetricHourlyData('wr', 'Demand(MW)', startDt,
                                                endDt)

    wrUnResDemDf = pd.DataFrame(wrDemVals)
    wrUnResDemDf = wrUnResDemDf.set_index('time_stamp')

    wr_min_unres_dem = round(wrUnResDemDf['data_value'].min())
    minUnresDemDt = wrUnResDemDf['data_value'].idxmin()

    minDtSrtT = dt.datetime(minUnresDemDt.year, minUnresDemDt.month,
                            minUnresDemDt.day)
    minDtEndT = dt.datetime(minUnresDemDt.year, minUnresDemDt.month,
                            minUnresDemDt.day, 23)
    wr_min_unres_dem_time_str = "HOURLY MIN.DEMAND CURVES ON REGIONAL MINIMUM DAY {1} at {0} Hrs".format(
        dt.datetime.strftime(minUnresDemDt, "%H:%M"),
        dt.datetime.strftime(minUnresDemDt, "%d-%b-%y"))
    wrUnResDemDf = wrUnResDemDf[(wrUnResDemDf.index >= minDtSrtT)
                                & (wrUnResDemDf.index <= minDtEndT)]

    # create a plotting area and get the figure, axes handle in return
    fig, ax = plt.subplots(figsize=(7.5, 2.5))
    # set plot title
    pltTitle = "WR"
    ax.set_title(pltTitle)
    # set x and y labels
    ax.set_xlabel('HOUR')
    ax.set_ylabel('MW')
    ax.set_facecolor("violet")
    fig.patch.set_facecolor('#95d0fc')

    wrUnResDemDf.reset_index(inplace=True)
    dateList = []
    times = wrUnResDemDf["time_stamp"]
    for col in times:
        dateList.append(dt.datetime.strftime(col, '%H'))

    wrUnResDemDf["time_stamp"] = dateList
    # plot data and get the line artist object in return
    laThisMonth, = ax.plot(wrUnResDemDf['time_stamp'],
                           wrUnResDemDf['data_value'],
                           color='#0000ff')

    # enable axis grid lines
    ax.yaxis.grid(True)
    ax.xaxis.grid(True)
    fig.subplots_adjust(bottom=0.25, top=0.8)
    fig.suptitle(wr_min_unres_dem_time_str, fontsize=10)
    fig.savefig('assets/section_2_3_2_{0}.png'.format(numPages))
    numPages += 1

    for itr in constituentinfo:
        if itr['entity_tag'] == 'wr':
            continue
        else:

            # get constituent demand hourly values for the regional minimum-demand day
            constDemVals = mRepo.getEntityMetricHourlyData(
                itr['entity_tag'], 'Demand(MW)', minDtSrtT, minDtEndT)

            constUnResDemDf = pd.DataFrame(constDemVals)
            constUnResDemDf = constUnResDemDf.set_index('time_stamp')
            constUnResDemDf.reset_index(inplace=True)
            dateList = []
            times = constUnResDemDf["time_stamp"]
            for col in times:
                dateList.append(dt.datetime.strftime(col, '%H'))

            constUnResDemDf["time_stamp"] = dateList

            # create a plotting area and get the figure, axes handle in return
            fig, ax = plt.subplots(figsize=(7.5, 2.5))
            # set plot title
            pltTitle = itr['display_name']
            ax.set_title(pltTitle)
            # set x and y labels
            ax.set_xlabel('HOUR')
            ax.set_ylabel('MW')
            laThisMonth, = ax.plot(constUnResDemDf['time_stamp'],
                                   constUnResDemDf['data_value'],
                                   color='#0000ff')
            # enable axis grid lines
            ax.yaxis.grid(True)
            ax.xaxis.grid(True)
            fig.subplots_adjust(bottom=0.25, top=0.8)
            ax.set_facecolor(itr['plot_c'])
            fig.patch.set_facecolor(itr['bac_c'])
            fig.savefig('assets/section_2_3_2_{0}.png'.format(numPages))
            numPages += 1

    sectionData = {'num_plts_sec_min_hourly': numPages}
    return sectionData
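
# --- Refactor sketch (not part of the original source) ---
# Both loops above that replace the time_stamp column with two-digit hour
# labels could share a single helper such as this one.
def toHourLabels(timestamps) -> List[str]:
    return [dt.datetime.strftime(ts, '%H') for ts in timestamps]
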
def fetchIexGraphContext(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> bool:
    mRepo = MetricsDataRepo(appDbConnStr)
    
    # restrict the plot to the latest day: fetch IEX DAM and RTM block-wise
    # data for the one-day range ending at endDt
    startDt = endDt - dt.timedelta(days=1)
    iexDamMcvVals = mRepo.getIexDamBlockWiseData('MCV (MW)', startDt, endDt)
    iexDamMcpVals = mRepo.getIexDamBlockWiseData('MCP (Rs/MWh) ', startDt, endDt)
    for itr in range(len(iexDamMcvVals)):
        iexDamMcvVals[itr]['metric_name'] = 'DAM MCV(MW)'
        iexDamMcvVals[itr]['time_stamp'] = itr+1
    for itr in range(len(iexDamMcpVals)):
        iexDamMcpVals[itr]['metric_name'] = 'DAM MCP(Rs/KWH)'
        iexDamMcpVals[itr]['time_stamp'] = itr+1
    iexRtmMcvVals = mRepo.getIexRtmBlockWiseData('MCV (MW)', startDt, endDt)
    iexRtmMcpVals = mRepo.getIexRtmBlockWiseData('MCP (Rs/MWh) ', startDt, endDt)
    for itr in range(len(iexRtmMcvVals)):
        iexRtmMcvVals[itr]['metric_name'] = 'RTM MCV(MW)'
        iexRtmMcvVals[itr]['time_stamp'] = itr+1
    for itr in range(len(iexRtmMcpVals)):
        iexRtmMcpVals[itr]['metric_name'] = 'RTM MCP(Rs/KWH)'
        iexRtmMcpVals[itr]['time_stamp'] = itr+1

    # create plot image for demands of prev yr, prev month, this month
    iexDamMcvObjs = [{'Date': 
        x["time_stamp"], 'colName': x['metric_name'], 'val': x["data_value"]} for x in iexDamMcvVals]
    iexDamMcpObjs = [{'Date': 
        x["time_stamp"], 'colName': x['metric_name'], 'val': x["data_value"]} for x in iexDamMcpVals]
    iexRtmMcvObjs = [{'Date': 
        x["time_stamp"], 'colName': x['metric_name'], 'val': x["data_value"]} for x in iexRtmMcvVals]
    iexRtmMcpObjs = [{'Date': 
        x["time_stamp"], 'colName': x['metric_name'], 'val': x["data_value"]} for x in iexRtmMcpVals]
    pltDataObjs = iexDamMcvObjs + iexDamMcpObjs + iexRtmMcvObjs + iexRtmMcpObjs
    pltDataDf = pd.DataFrame(pltDataObjs)
    pltDataDf = pltDataDf.pivot(
        index='Date', columns='colName', values='val')
    pltDataDf['DAM MCP(Rs/KWH)'] = pltDataDf['DAM MCP(Rs/KWH)']/1000
    pltDataDf['RTM MCP(Rs/KWH)'] = pltDataDf['RTM MCP(Rs/KWH)']/1000
    pltDataDf['RTM MCP(Rs/KWH)'].replace(to_replace=0, method='ffill', inplace=True)
    pltDataDf['RTM MCV(MW)'].replace(to_replace=0, method='ffill', inplace=True)

    # derive plot title
    dateStr = startDt.strftime("%d-%m-%Y")
    pltTitle = 'MCP & MCV Data as per IEX Data for {0}'.format(dateStr)

    # create a plotting area and get the figure, axes handle in return
    fig, ax = plt.subplots(figsize=(7.5, 4.5))
    # instantiate a second axes that shares the same x-axis
    ax2 = ax.twinx()
    # set plot title
    ax.set_title(pltTitle)
    # set y labels
    ax2.set_ylabel('Rs/KWH')
    ax.set_ylabel('MWH')
    ax.set_facecolor("#474747")

    # set y axis limit
    # fig.patch.set_facecolor('#d9ccff')

    clr = ['#66b3ff', '#df80ff', '#ff6666', '#00b359']
    # set x axis tick positions manually (one tick every 5 blocks)
    xTicks = [1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 51, 56, 61, 66, 71, 76, 81, 86, 91, 96]

    # plot data and get the line artist object in return
    laThisMonth, = ax.plot(
        pltDataDf.index.values, pltDataDf['DAM MCV(MW)'].values, color='#66b3ff')
    laThisMonth.set_label('DAM MCV(MW)')

    laLastYear, = ax2.plot(
        pltDataDf.index.values, pltDataDf['DAM MCP(Rs/KWH)'].values, color='#df80ff')
    laLastYear.set_label('DAM MCP(Rs/KWH)')

    laPrevMonth, = ax.plot(
        pltDataDf.index.values, pltDataDf['RTM MCV(MW)'].values, color='#00b359')
    laPrevMonth.set_label('RTM MCV(MW)')

    laPrevMonth, = ax2.plot(
        pltDataDf.index.values, pltDataDf['RTM MCP(Rs/KWH)'].values, color='#ff6666')
    laPrevMonth.set_label('RTM MCP(Rs/KWH)')

    ax.set_xlim((1, 96), auto=True)
    ax.set_xticks(xTicks)
    # ax.set_xlim((1, 31), auto=True)
    # enable legends
    ax.legend(bbox_to_anchor=(0.0, -0.3, 1, 0), loc='best',
              ncol=3, mode="expand", borderaxespad=0.)
    ax2.legend(bbox_to_anchor=(0.0, -0.3, 1, 0), loc='lower right',
              ncol=3, mode="expand", borderaxespad=0.)
    fig.subplots_adjust(bottom=0.25, top=0.8)
    fig.savefig('assets/section_1_3.png')
    plt.close()

    return True
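
# --- Illustrative helper (assumption): the IEX block numbers plotted above run
# from 1 to 96, i.e. one 15-minute block per quarter hour of the day. If clock
# time labels were preferred over block numbers, a conversion could look like:
def blockToTimeLabel(blockNo: int) -> str:
    minutesFromMidnight = (blockNo - 1) * 15
    return '{0:02d}:{1:02d}'.format(minutesFromMidnight // 60,
                                    minutesFromMidnight % 60)
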
def fetchSection1_11_LoadCurve(appDbConnStr: str, startDt: dt.datetime,
                               endDt: dt.datetime):
    mRepo = MetricsDataRepo(appDbConnStr)

    pltDataObj: list = []

    totalGen = mRepo.getEntityREHourlyData('wr', startDt, endDt)
    df = pd.DataFrame(totalGen)

    max_total_gen_position = df['val'].idxmax()
    max_total_gen_dt = df['time_stamp'].iloc[max_total_gen_position]
    max_str_date = dt.datetime(max_total_gen_dt.year, max_total_gen_dt.month,
                               max_total_gen_dt.day)

    totalGenOnMaxGenDay = mRepo.getEntityREHourlyData('wr', max_str_date,
                                                      max_str_date)
    wrDemandOnMaxGenDay = mRepo.getEntityMetricHourlyData(
        'wr', 'Demand(MW)', max_str_date, max_str_date)

    pltDataDf = pd.DataFrame()
    pltDataDf['hours'] = [
        x['time_stamp'].hour + 1 for x in totalGenOnMaxGenDay
    ]
    pltDataDf['reGen'] = [x['val'] for x in totalGenOnMaxGenDay]
    pltDataDf['wrDemand'] = [x['data_value'] for x in wrDemandOnMaxGenDay]
    pltDataDf['netLoad'] = [
        float(x['data_value']) - float(y['val'])
        for x, y in zip(wrDemandOnMaxGenDay, totalGenOnMaxGenDay)
    ]

    pltDataDf.to_excel("assets/plot_1_11_netloadcurve.xlsx")

    pltTitle = 'Net Load Curve when RES (Wind and Solar) Generation was max on {0}'.format(
        max_str_date.strftime('%d-%m-%Y'))

    fig, ax = plt.subplots(figsize=(7.5, 5.6))

    ax.set_title(pltTitle)
    ax.set_ylabel('MW')
    ax.set_xlabel('Hour')

    ax.xaxis.set_major_locator(mdates.DayLocator(interval=1))
    # ax.xaxis.set_major_formatter(mdates.DateFormatter('%H'))

    clr = ['#03a56a', '#a5033e', '#00ffff']
    x = pltDataDf['hours']
    y = [pltDataDf['reGen'], pltDataDf['netLoad'], pltDataDf['wrDemand']]

    # ax.stackplot(x, y , labels=['Wind+Solar' ,'Net Load', 'WR Demand'],baseline='sym')

    ax.fill_between(x,
                    y[0],
                    color='#3f9a3c',
                    alpha=1,
                    label='Wind+Solar',
                    zorder=3)
    ax.fill_between(x,
                    y[1],
                    color='#b03b3d',
                    alpha=1,
                    label='Net Load',
                    zorder=2)
    ax.fill_between(x,
                    y[2],
                    color='#5aeaeb',
                    alpha=1,
                    label='WR Demand',
                    zorder=1)

    ax.yaxis.grid(True)
    ax.legend(bbox_to_anchor=(0.5, -0.2, 0.0, 0.0),
              loc='center',
              ncol=4,
              borderaxespad=0.)

    # plt.xticks(rotation=90)
    ax.set_xlim(xmin=1, xmax=24)
    fig.subplots_adjust(bottom=0.25, top=0.8)

    fig.savefig('assets/section_1_11_netLoadCurve.png')
    # plt.show()
    # plt.close()

    secData: dict = {}

    return secData
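
# --- Note (sketch, not part of the original source): the netLoad column built
# above is simply hourly demand minus renewable generation; assuming both
# columns are numeric, the same result could be computed vectorised as
#     pltDataDf['netLoad'] = pltDataDf['wrDemand'] - pltDataDf['reGen']
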
def fetchSection1_6_1Context(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> ISection_1_6_1:
    monthName = dt.datetime.strftime(startDt, "%b %y")
    mRepo = MetricsDataRepo(appDbConnStr)
    # get WR freq Daily values for this month
    freqLessThanBandVals = mRepo.getFreqDailyData('< 49.9', startDt, endDt)
    freqLessVals = [x['data_value'] for x in freqLessThanBandVals]
    freqLessVals = [float(i) for i in freqLessVals]
    freq_max_less_band = max(freqLessVals)
    freq_avg_less_band = sum(freqLessVals)/len(freqLessVals)

    freqBetBandVals = mRepo.getFreqDailyData(
        '>= 49.9 - <= 50.05', startDt, endDt)
    freBetVals = [x['data_value'] for x in freqBetBandVals]
    freBetVals = [float(i) for i in freBetVals]
    freq_max_bet_band = max(freBetVals)
    freq_avg_bet_band = sum(freBetVals)/len(freBetVals)

    freqGreaterThanBandVals = mRepo.getFreqDailyData('> 50', startDt, endDt)
    freGreaterThanVals = [x['data_value'] for x in freqGreaterThanBandVals]
    freGreaterThanVals = [float(i) for i in freGreaterThanVals]
    freq_max_greater_than_band = max(freGreaterThanVals)
    freq_avg_greater_than_band = sum(
        freGreaterThanVals)/len(freGreaterThanVals)

    freqFviVals = mRepo.getFreqDailyData('FVI', startDt, endDt)
    freqFVIVals = [x['data_value'] for x in freqFviVals]
    freqFVIVals = [float(i) for i in freqFVIVals]
    max_fvi = max(freqFVIVals)
    avg_fvi = sum(freqFVIVals)/len(freqFVIVals)

    hrsOutOfBandVals = mRepo.getFreqDailyData(
        'Hrs Out of IEGC', startDt, endDt)
    hrsOutOfBand = [x['data_value'] for x in hrsOutOfBandVals]
    hrsOutOfBand = [float(i) for i in hrsOutOfBand]
    hrs_max_out_of_band = max(hrsOutOfBand)
    hrs_avg_out_of_band = sum(hrsOutOfBand)/len(hrsOutOfBand)

    dailyFdi = [x/24 for x in hrsOutOfBand]
    max_Fdi = max(dailyFdi)
    avg_Fdi = sum(dailyFdi)/len(dailyFdi)

    perc_time_out_of_band = [x*100 for x in dailyFdi]
    max_perc_time = max(perc_time_out_of_band)
    avg_perc_time = sum(perc_time_out_of_band)/len(perc_time_out_of_band)

    freqDailyMaxVals = mRepo.getFreqDailyData('max inst f', startDt, endDt)
    freqMaxVals = [x['data_value'] for x in freqDailyMaxVals]
    freqMaxVals = [float(i) for i in freqMaxVals]
    max_monthly_freq = max(freqMaxVals)

    freqDailyMinVals = mRepo.getFreqDailyData('min inst f', startDt, endDt)
    freqMinVals = [x['data_value'] for x in freqDailyMinVals]
    freqMinVals = [float(i) for i in freqMinVals]
    min_monthly_freq = min(freqMinVals)

    freqDailyAvgVals = mRepo.getFreqDailyData('avg frq', startDt, endDt)
    freqAvgVals = [x['data_value'] for x in freqDailyAvgVals]
    freqAvgVals = [float(i) for i in freqAvgVals]
    avg_monthly_freq = sum(freqAvgVals)/len(freqAvgVals)

    # create plot image for freq profile
    pltLessThanBandObjs = [{'Date': convertDtToDayNumMonth(
        x["time_stamp"]), 'colName': x["metric_name"], 'val': x["data_value"]} for x in freqLessThanBandVals]
    pltBetBandObjs = [{'Date': convertDtToDayNumMonth(
        x["time_stamp"]), 'colName': x["metric_name"], 'val': x["data_value"]} for x in freqBetBandVals]
    pltGreaterThanBandObjs = [{'Date': convertDtToDayNumMonth(
        x["time_stamp"]), 'colName': x["metric_name"], 'val': x["data_value"]} for x in freqGreaterThanBandVals]
    pltFviObjs = [{'Date': convertDtToDayNumMonth(
        x["time_stamp"]), 'colName': x["metric_name"], 'val': x["data_value"]} for x in freqFviVals]
    pltHrsOutOfBandObjs = [{'Date': convertDtToDayNumMonth(
        x["time_stamp"]), 'colName': x["metric_name"], 'val': x["data_value"]} for x in hrsOutOfBandVals]
    pltDailyMaxFreqObjs = [{'Date': convertDtToDayNumMonth(
        x["time_stamp"]), 'colName': x["metric_name"], 'val': x["data_value"]} for x in freqDailyMaxVals]
    pltDailyMinFreqObjs = [{'Date': convertDtToDayNumMonth(
        x["time_stamp"]), 'colName': x["metric_name"], 'val': x["data_value"]} for x in freqDailyMinVals]
    pltDailyAvgFreqObjs = [{'Date': convertDtToDayNumMonth(
        x["time_stamp"]), 'colName': x["metric_name"], 'val': x["data_value"]} for x in freqDailyAvgVals]
    tableDataObjs = pltLessThanBandObjs + pltBetBandObjs + pltGreaterThanBandObjs + \
        pltFviObjs + pltHrsOutOfBandObjs + \
        pltDailyMaxFreqObjs + pltDailyMinFreqObjs + pltDailyAvgFreqObjs

    pltFreqGraphObjs = pltLessThanBandObjs + \
        pltBetBandObjs + pltGreaterThanBandObjs

    tableDataDf = pd.DataFrame(tableDataObjs)
    tableDataDf = tableDataDf.pivot(
        index='Date', columns='colName', values='val')
    tableDataDf.reset_index(inplace=True)
    tableDataDf['fdi'] = dailyFdi

    tableDataDf['out_of_band_perc'] = perc_time_out_of_band

    freqProfileList: ISection_1_6_1["freq_profile"] = []

    for i in tableDataDf.index:
        freqProf: IFreqDetails = {
            'date': tableDataDf['Date'][i],
            'less_than_band': round(float(tableDataDf['< 49.9'][i]), 2),
            'freq_bet_band': round(float(tableDataDf['>= 49.9 - <= 50.05'][i]), 2),
            'out_of_band': round(float(tableDataDf['> 50'][i]), 2),
            'fvi': round(float(tableDataDf['FVI'][i]), 2),
            'out_of_band_perc': round(float(tableDataDf['out_of_band_perc'][i]), 2),
            'hrs_out_of_band': round(float(tableDataDf['Hrs Out of IEGC'][i]), 2),
            'fdi': round(float(tableDataDf['fdi'][i]), 2),
            'freq_daily_max': round(float(tableDataDf['max inst f'][i]), 2),
            'freq_daily_min': round(float(tableDataDf['min inst f'][i]), 2),
            'freq_daily_avg': round(float(tableDataDf['avg frq'][i]), 2)
        }
        freqProfileList.append(freqProf)

    sectionData: ISection_1_6_1 = {
        "freq_profile": freqProfileList,
        "freq_max_less_band": round(freq_max_less_band, 2),
        "freq_avg_less_band": round(freq_avg_less_band, 2),
        "freq_max_bet_band": round(freq_max_bet_band, 2),
        "freq_avg_bet_band": round(freq_avg_bet_band, 2),
        "freq_max_greater_than_band": round(freq_max_greater_than_band, 2),
        "freq_avg_greater_than_band": round(freq_avg_greater_than_band, 2),
        "max_fvi": round(max_fvi, 2),
        "avg_fvi": round(avg_fvi, 2),
        "hrs_max_out_of_band": round(hrs_max_out_of_band, 2),
        "hrs_avg_out_of_band": round(hrs_avg_out_of_band, 2),
        "max_Fdi": round(max_Fdi, 2),
        "avg_Fdi": round(avg_Fdi, 2),
        "max_perc_time": round(max_perc_time, 2),
        "avg_perc_time": round(avg_perc_time, 2),
        "max_monthly_freq": round(max_monthly_freq, 2),
        "min_monthly_freq": round(min_monthly_freq, 2),
        "avg_monthly_freq": round(avg_monthly_freq, 2)
    }

    pltFreqGraphDf = pd.DataFrame(pltFreqGraphObjs)
    pltFreqGraphDf = pltFreqGraphDf.pivot(
        index='Date', columns='colName', values='val')
    pltFreqGraphDf.reset_index(inplace=True)
    # save plot data as excel
    pltFreqGraphDf.to_excel("assets/plot_1_6_2.xlsx", index=True)

    # derive plot title
    pltTitle = 'Frequency Profile for {0}'.format(monthName)

    # create a plotting area and get the figure, axes handle in return
    fig, ax = plt.subplots(figsize=(7.5, 4.5))
    # set plot title
    ax.set_title(pltTitle)
    
    fig.patch.set_facecolor('#dce6f2')

    # plot data and get the line artist object in return
    width = 0.4
    plt.bar(pltFreqGraphDf['Date'], freqLessVals,
            width=0.4, color='#ff0066', label='<49.9')

    plt.bar(pltFreqGraphDf['Date'], freBetVals, width,
            bottom=freqLessVals, color='#00cc66', label='49.9-50.05')

    greatorThanBandBottom = list(np.add(freqLessVals, freBetVals))

    plt.bar(pltFreqGraphDf['Date'], freGreaterThanVals, width,
            bottom=greatorThanBandBottom, color='#0086b3', label='>50.05')

    plt.xticks(rotation=90)
    # plt.legend()
    plt.legend(bbox_to_anchor=(0.0, -0.35, 1, 0), loc='lower center',
               ncol=3, mode="expand", borderaxespad=0.)
    fig.subplots_adjust(bottom=0.25, left=0.1, right=0.99)
    fig.savefig('assets/section_1_6_2.png')
    # plt.show()
    return sectionData
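
# --- Assumption: convertDtToDayNumMonth is a project helper imported elsewhere.
# From its use above it appears to turn a timestamp into a short per-day label
# used as the table/plot index. A minimal sketch of that assumed behaviour:
def convertDtToDayNumMonthSketch(ts: dt.datetime) -> str:
    return dt.datetime.strftime(ts, '%d-%b')
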
def fetchSection1_5_1Context(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> dict:
    # get WR demand from recent Fin year start till this month
    # and WR demand from 2 years back fin year to last fin year
    # example: For Jan 21, we require data from 1-Apr-2019 to 31-Mar-2020 and 1-Apr-2020 to 31 Jan 21

    finYrStart = getFinYrDt(startDt)
    prevFinYrStart = getPrevFinYrDt(finYrStart)

    finYrName = '{0}-{1}'.format(finYrStart.year, (finYrStart.year+1) % 100)
    prevFinYrName = '{0}-{1}'.format(finYrStart.year-1, finYrStart.year % 100)
    mRepo = MetricsDataRepo(appDbConnStr)
    # get WR hourly demand values for this financial year
    wrDemVals = mRepo.getEntityMetricDailyData(
        'wr', 'Max Demand(MW)', finYrStart, endDt)
    wrPrevFinYrDemVals = mRepo.getEntityMetricDailyData(
        'wr', 'Max Demand(MW)', prevFinYrStart, finYrStart-dt.timedelta(days=1))

    # create plot image for demands of prev fin year and this fin year
    pltDemObjs = [{'MONTH': x["time_stamp"], 'colName': finYrName,
                   'val': x["data_value"]} for x in wrDemVals]
    pltDemObjsLastYear = [{'MONTH': x["time_stamp"],
                           'colName': prevFinYrName, 'val': x["data_value"]} for x in wrPrevFinYrDemVals]

    pltDataObjs = pltDemObjs + pltDemObjsLastYear

    pltDataDf = pd.DataFrame(pltDataObjs)
    pltDataDf = pltDataDf.pivot(
        index='MONTH', columns='colName', values='val')
    maxTs = pltDataDf.index.max()
    for rIter in range(pltDataDf.shape[0]):
        # where the previous fin-year column has data, copy this fin-year's value
        # for the same calendar date (12 months later) onto that row so both
        # series share one x-axis in the plot
        lastYrDt = pltDataDf.index[rIter].to_pydatetime()
        if not pd.isna(pltDataDf[prevFinYrName].iloc[rIter]):
            thisYrTs = pd.Timestamp(addMonths(lastYrDt, 12))
            if thisYrTs <= maxTs:
                thisYrVal = pltDataDf[finYrName].loc[thisYrTs]
                pltDataDf.at[pd.Timestamp(lastYrDt), finYrName] = thisYrVal
    pltDataDf = pltDataDf[~pltDataDf[prevFinYrName].isna()]

    # save plot data as excel
    pltDataDf.to_excel("assets/plot_1_5_1.xlsx", index=True)

    # derive plot title
    pltTitle = 'WR seasonal demand (daily max.) Plot {0} to {1}'.format(
        prevFinYrName, finYrName)

    # create a plotting area and get the figure, axes handle in return
    fig, ax = plt.subplots(figsize=(7.5, 4.8))
    # set plot title
    ax.set_title(pltTitle)
    # set x and y labels
    ax.set_xlabel('MONTH')
    ax.set_ylabel('MW')

    # set x axis locator as month
    ax.xaxis.set_major_locator(mdates.MonthLocator())
    # set x axis formatter as month name
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%b'))

    # ax.set_xlim(xmin=finYrStart)

    # plot data and get the line artist object in return
    laThisYr, = ax.plot(pltDataDf.index.values,
                        pltDataDf[finYrName].values, color='#ff0000')
    laThisYr.set_label(finYrName)

    laLastYear, = ax.plot(pltDataDf.index.values,
                          pltDataDf[prevFinYrName].values, color='#0000ff')
    laLastYear.set_label(prevFinYrName)

    # enable axis grid lines
    ax.yaxis.grid(True)
    ax.xaxis.grid(True)
    # enable legends
    ax.legend(bbox_to_anchor=(0.0, -0.3, 1, 0), loc='lower center',
              ncol=2, borderaxespad=0.)
    fig.subplots_adjust(bottom=0.25, top=0.8)
    fig.savefig('assets/section_1_5_1.png')

    secData: dict = {}
    return secData
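
# --- Assumption: getFinYrDt and getPrevFinYrDt are project helpers imported
# elsewhere. Based on their use above they appear to return the start of the
# financial year (assumed here to begin on 1 April) containing a date, and the
# start of the preceding financial year. Minimal sketches under that assumption:
def getFinYrDtSketch(d: dt.datetime) -> dt.datetime:
    finYrStartYear = d.year if d.month >= 4 else d.year - 1
    return dt.datetime(finYrStartYear, 4, 1)


def getPrevFinYrDtSketch(finYrStart: dt.datetime) -> dt.datetime:
    return dt.datetime(finYrStart.year - 1, finYrStart.month, finYrStart.day)
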
def fetchSection1_1_voltContext(appDbConnStr: str, startDt: dt.datetime, endDt: dt.datetime) -> ISection_1_1_volt:
    monthDtObj = dt.datetime(startDt.year, startDt.month, 1)
    month_name = dt.datetime.strftime(startDt, "%b %y")
    mRepo = MetricsDataRepo(appDbConnStr)
    # get high voltage violation data for 765 kV
    voltData765 = mRepo.getDailyVoltDataByLevel(
        765, "%Time >420 or 800", startDt, endDt)
    # convert to dataframe
    voltData765Df = pd.DataFrame(voltData765)
    # take only required columns
    voltData765Df = voltData765Df[["entity_name", "data_val"]]
    # convert column to numeric
    voltData765Df['data_val'] = pd.to_numeric(
        voltData765Df['data_val'], errors='coerce')
    # get mean for each substation
    voltData765Df = voltData765Df.groupby("entity_name").mean()
    voltData765Df.reset_index(inplace=True)
    # check if any substation violated the limit for more than 10% of the time
    is765MoreThan10Perc = voltData765Df[voltData765Df["data_val"]
                                        > 10].shape[0] > 0
    msg765 = ""
    if not is765MoreThan10Perc:
        msgStations = voltData765Df[voltData765Df["data_val"]
                                    > 0]["entity_name"].values
        msgStations = [x.replace(' - 765KV', '').capitalize()
                       for x in msgStations]
        msg765 = "All 765 kV nodes of WR were within the IEGC limit."
        if len(msgStations) > 0:
            msg765 = "All 765 kV nodes of WR were within the IEGC limit except few instances at {0}.".format(
                ','.join(msgStations))
    else:
        msgStations = voltData765Df[voltData765Df["data_val"]
                                    > 10]["entity_name"].values
        msgStations = [x.replace(' - 765KV', '').capitalize()
                       for x in msgStations]
        highViolSubstation = voltData765Df.loc[voltData765Df['data_val'].idxmax(
        )]
        msg765 = "High Voltage (greater than 800 kV) at 765 kV substations were observed at {0}. Highest of {1}{2} of time voltage remained above 780 kV at {3} in the month of {4}.".format(
            ', '.join(msgStations), round(highViolSubstation["data_val"], 2), "%", highViolSubstation["entity_name"].replace(' - 765KV', '').capitalize(), month_name)

    # get high voltage violation data for 400 kV
    voltData400 = mRepo.getDailyVoltDataByLevel(
        400, "%Time >420 or 800", startDt, endDt)
    # convert to dataframe
    voltData400Df = pd.DataFrame(voltData400)
    # take only required columns
    voltData400Df = voltData400Df[["entity_name", "data_val"]]
    # convert column to numeric
    voltData400Df['data_val'] = pd.to_numeric(
        voltData400Df['data_val'], errors='coerce')
    # get mean for each substation
    voltData400Df = voltData400Df.groupby("entity_name").mean()
    voltData400Df.reset_index(inplace=True)
    # check if any substation violated the limit for more than 10% of the time
    is400MoreThan10Perc = voltData400Df[voltData400Df["data_val"]
                                        > 10].shape[0] > 0
    msg400 = ""
    if not is400MoreThan10Perc:
        msgStations = voltData400Df[voltData400Df["data_val"]
                                    > 0]["entity_name"].values
        msgStations = [x.replace(' - 400KV', '').capitalize()
                       for x in msgStations]
        msg400 = "All 400 kV nodes of WR were within the IEGC limit."
        if len(msgStations) > 0:
            msg400 = "All 400 kV nodes of WR were within the IEGC limit except few instances at {0}.".format(
                ','.join(msgStations))
    else:
        msgStations = voltData400Df[voltData400Df["data_val"]
                                    > 10]["entity_name"].values
        msgStations = [x.replace(' - 400KV', '').capitalize()
                       for x in msgStations]
        highViolSubstation = voltData400Df.loc[voltData400Df['data_val'].idxmax(
        )]
        msg400 = "High Voltage (greater than 420 kV) at 400 kV substations were observed at {0}. Highest of {1}{2} of time voltage remained above 420 kV at {3} in the month of {4}.".format(
            ', '.join(msgStations), round(highViolSubstation["data_val"], 2), "%", highViolSubstation["entity_name"].replace(' - 400KV', '').capitalize(), month_name)
    msg = " ".join([msg765, msg400])
    secData: ISection_1_1_volt = {
        "msg_1_1_volt": msg
    }
    return secData
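
# --- Refactor sketch (not part of the original source) ---
# The 765 kV and 400 kV branches above follow the same steps to build their
# violation messages. A parameterised helper along these lines could replace
# the duplication; the 10% threshold and limit wording are taken from the
# code above, everything else is illustrative.
def buildVoltViolationMsgSketch(voltDf: pd.DataFrame, levelKv: int,
                                limitKv: int, monthName: str) -> str:
    suffix = ' - {0}KV'.format(levelKv)
    overLimit = voltDf[voltDf['data_val'] > 10]
    if overLimit.shape[0] == 0:
        stations = [x.replace(suffix, '').capitalize()
                    for x in voltDf[voltDf['data_val'] > 0]['entity_name'].values]
        if len(stations) > 0:
            return ("All {0} kV nodes of WR were within the IEGC limit "
                    "except few instances at {1}.").format(levelKv, ', '.join(stations))
        return "All {0} kV nodes of WR were within the IEGC limit.".format(levelKv)
    stations = [x.replace(suffix, '').capitalize()
                for x in overLimit['entity_name'].values]
    worst = voltDf.loc[voltDf['data_val'].idxmax()]
    return ("High Voltage (greater than {0} kV) at {1} kV substations was observed at {2}. "
            "Highest of {3}% of time voltage remained above {0} kV at {4} "
            "in the month of {5}.").format(
                limitKv, levelKv, ', '.join(stations), round(worst['data_val'], 2),
                worst['entity_name'].replace(suffix, '').capitalize(), monthName)
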