def getRemcGraphDataSectionDataDf(configFilePath, configSheetName):
    """Build the dataframe for the REMC graph-data section.

    The config sheet must contain the columns ``name``, ``pnt`` and
    ``store``; each row produces one result column named ``name`` holding
    the point data fetched from the given store (zero-filled when the
    store has no data for the point).
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    # strip stray whitespace from the config columns we use for lookups
    for col in ('name', 'pnt', 'store'):
        confDf[col] = confDf[col].str.strip()

    resValsObj = {}
    for rowIter, confRow in confDf.iterrows():
        # rowIter + 2 -> the excel row number (header occupies row 1)
        printWithTs('graph data processing row number {0}'.format(rowIter + 2))
        pntData = getRemcPntData(confRow['store'], confRow['pnt'])
        if pntData.size == 0:
            # no data in the store - substitute a 96-block zero series
            pntData = pd.Series([0] * 96)
        resValsObj[confRow['name']] = pntData.values
    return pd.DataFrame(resValsObj)
def getMaxGenInfoSectionDataDf(configFilePath, configSheetName):
    """Build the max-generation info section dataframe.

    Expected config columns:
    name, installed_capacity, avc_id, act_id, sch_id, type,
    pooling_station, gen_type.

    Row ``type`` may be dummy / normal / agg_pool / agg_gen_type:
    dummy rows emit an all-None result row; agg_* rows aggregate the
    act_id points of all normal rows sharing the same value in the
    named aggregation column; anything else is a single-point row.
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    for col in ('name', 'type', 'pooling_station', 'gen_type'):
        confDf[col] = confDf[col].str.strip()
    # normal rows are the aggregation candidates for agg_* rows
    normalPntsConfDf = confDf[(confDf['type'] == 'normal') |
                              (confDf['type'] == '') |
                              (confDf['type'].isnull())]
    resValsList = []
    for rowIter in range(confDf.shape[0]):
        confRow = confDf.iloc[rowIter]
        printWithTs('max gen info processing row number {0}'.format(rowIter+2))
        rowType = confRow['type']
        if rowType == 'dummy':
            # dummy rows are placeholders - emit an all-None result row
            resValsList.append({"name": confRow['name'],
                                "installed_capacity": None,
                                "max1": None, "max1_perc": None,
                                "max2": None, "max2_perc": None,
                                "max3": None, "max3_perc": None})
            continue
        elif not pd.isnull(rowType) and rowType.startswith('agg_'):
            # join the act_id points of all normal rows with the same
            # value in the aggregation column (pooling_station/gen_type)
            aggColName = rowType[len('agg_'):]
            aggIdentifier = confRow[aggColName]
            confDfForAgg = normalPntsConfDf[
                normalPntsConfDf[aggColName] == aggIdentifier]
            actPnt = ','.join(confDfForAgg['act_id'].tolist())
        else:
            actPnt = confRow['act_id']
        installedCapacity = confRow['installed_capacity']
        # fetch the actual series once and slice the three day segments
        # (original fetched it three times; the unused per-row
        # getPntData('HRS') call was removed as dead work)
        actData = getPntData(actPnt)
        # NOTE(review): slices look like minute offsets (05:00-10:00,
        # 10:00-17:00, 17:00-24:00) - assumes 1-minute resolution data;
        # confirm against getPntData
        max1 = actData[5*60:10*60].max()
        max1Perc = max1*100/installedCapacity
        max2 = actData[10*60:17*60].max()
        max2Perc = max2*100/installedCapacity
        max3 = actData[17*60:24*60].max()
        max3Perc = max3*100/installedCapacity
        resValsList.append({"name": confRow['name'],
                            "installed_capacity": installedCapacity,
                            "max1": max1, "max1_perc": max1Perc,
                            "max2": max2, "max2_perc": max2Perc,
                            "max3": max3, "max3_perc": max3Perc})
    return pd.DataFrame(resValsList)
def getRemcStateDaSummSectionDataDf(configFilePath, configSheetName):
    """Build the REMC state day-ahead forecast summary dataframe.

    Expected config columns:
    name, type, forecast_pnt, state, gen_type, gen_type2.
    Row type may be dummy / normal / agg_gen_type / agg_gen_type2;
    agg_* rows combine the forecast points of all normal rows sharing
    the aggregation column value.
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    for col in ('name', 'type', 'forecast_pnt', 'state',
                'gen_type', 'gen_type2'):
        confDf[col] = confDf[col].str.strip()
    # normal rows feed the agg_* aggregations
    normalPntsConfDf = confDf[(confDf['type'] == 'normal') |
                              (confDf['type'] == '') |
                              (confDf['type'].isnull())]
    resValsList = []
    for rowIter, confRow in confDf.iterrows():
        printWithTs(
            'REMC State Day Ahead Forecast summary processing row number {0}'.
            format(rowIter + 2))
        rowType = confRow['type']
        if rowType == 'dummy':
            # placeholder row - all statistics stay None
            resValsList.append({"name": confRow['name'], "max": None,
                                "min": None, "avg": None})
            continue
        if not pd.isnull(rowType) and rowType.startswith('agg_'):
            aggColName = rowType[len('agg_'):]
            matches = normalPntsConfDf[
                normalPntsConfDf[aggColName] == confRow[aggColName]]
            forecastPnt = ','.join(matches['forecast_pnt'].tolist())
        else:
            forecastPnt = confRow['forecast_pnt']
        forecastSeries = getRemcPntData(FCA_DAY_AHEAD_STORE_NAME, forecastPnt)
        resValsList.append({"name": confRow['name'],
                            "max": forecastSeries.max(),
                            "min": forecastSeries.min(),
                            "avg": forecastSeries.mean()})
    return pd.DataFrame(resValsList)
def getRemcIstsErrNumBlksSectionDataDf(configFilePath, configSheetName):
    """Build the REMC ISTS error number-of-blocks report dataframe.

    Expected config columns:
    name, r16_pnt, avc_pnt, actual_pnt, type, pooling_station.
    For every non-dummy row the number of blocks with error below 15%
    is computed against each forecaster store (IFT / Aleasoft / RES /
    Enercast / FCA).
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    for col in ('name', 'r16_pnt', 'avc_pnt', 'actual_pnt',
                'type', 'pooling_station'):
        confDf[col] = confDf[col].str.strip()
    # normal rows feed the agg_* aggregations
    normalPntsConfDf = confDf[(confDf['type'] == 'normal') |
                              (confDf['type'] == '') |
                              (confDf['type'].isnull())]
    # forecaster stores keyed by their result-column name, in output order
    forecasterStores = (('ift', IFT_FORECAST_VS_ACTUAL_STORE_NAME),
                        ('aleasoft', ALEA_FORECAST_VS_ACTUAL_STORE_NAME),
                        ('res', RES_FORECAST_VS_ACTUAL_STORE_NAME),
                        ('enercast', ENER_FORECAST_VS_ACTUAL_STORE_NAME),
                        ('fca', FCA_FORECAST_VS_ACTUAL_STORE_NAME))
    resValsList = []
    for rowIter, confRow in confDf.iterrows():
        printWithTs(
            'REMC ISTS error number of blocks report processing row number '
            '{0}'.format(rowIter + 2))
        rowType = confRow['type']
        if rowType == 'dummy':
            # placeholder row - all counts stay None
            resValsList.append({"name": confRow['name'], "ift": None,
                                "aleasoft": None, "res": None,
                                "enercast": None, "fca": None})
            continue
        if not pd.isnull(rowType) and rowType.startswith('agg_'):
            aggColName = rowType[len('agg_'):]
            matches = normalPntsConfDf[
                normalPntsConfDf[aggColName] == confRow[aggColName]]
            avcPnt = ','.join(matches['avc_pnt'].tolist())
            actPnt = ','.join(matches['actual_pnt'].tolist())
            r16Pnt = ','.join(matches['r16_pnt'].tolist())
        else:
            avcPnt = confRow['avc_pnt']
            actPnt = confRow['actual_pnt']
            r16Pnt = confRow['r16_pnt']
        resRow = {"name": confRow['name']}
        for colName, storeName in forecasterStores:
            resRow[colName] = getNumBlksWithErrLessThan15(
                storeName, avcPnt, r16Pnt, actPnt)
        resValsList.append(resRow)
    return pd.DataFrame(resValsList)
def getRemcRegDaSummSectionDataDf(configFilePath, configSheetName):
    """Build the REMC regional day-ahead forecast summary dataframe.

    Expected config columns: name, type, forecast_pnt. Dummy rows emit
    all-None result rows; every other row summarizes (max/min/avg) the
    day-ahead forecast series of its forecast point.
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    for col in ('name', 'type', 'forecast_pnt'):
        confDf[col] = confDf[col].str.strip()
    resValsList = []
    for rowIter, confRow in confDf.iterrows():
        printWithTs(
            'REMC Regional Day Ahead Forecast summary processing row number {0}'
            .format(rowIter + 2))
        if confRow['type'] == 'dummy':
            # placeholder row - all statistics stay None
            resValsList.append({"name": confRow['name'], "max": None,
                                "min": None, "avg": None})
            continue
        forecastSeries = getRemcPntData(FCA_DAY_AHEAD_STORE_NAME,
                                        confRow['forecast_pnt'])
        resValsList.append({"name": confRow['name'],
                            "max": forecastSeries.max(),
                            "min": forecastSeries.min(),
                            "avg": forecastSeries.mean()})
    return pd.DataFrame(resValsList)
def getRemcStateErrSummSectionDataDf(configFilePath, configSheetName):
    """Build the REMC state error summary dataframe (R0/R16 MAPE & NRMSE).

    Expected config columns:
    name, r0_pnt, r16_pnt, actual_pnt, cuf_pnt, avc_pnt, type, state,
    gen_type, gen_type_2.
    Row type may be dummy / normal / agg_* (aggregating the points of
    all normal rows sharing the aggregation column value).
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    for col in ('name', 'r0_pnt', 'r16_pnt', 'actual_pnt', 'avc_pnt',
                'type', 'state', 'gen_type', 'gen_type_2'):
        confDf[col] = confDf[col].str.strip()
    # normal rows feed the agg_* aggregations
    normalPntsConfDf = confDf[(confDf['type'] == 'normal') |
                              (confDf['type'] == '') |
                              (confDf['type'].isnull())]
    resValsList = []
    for rowIter, confRow in confDf.iterrows():
        printWithTs(
            'REMC state error summary processing row number {0}'.format(
                rowIter + 2))
        rowType = confRow['type']
        if rowType == 'dummy':
            # placeholder row - all metrics stay None
            resValsList.append({"name": confRow['name'],
                                "r0_mape": None, "r0_nrmse": None,
                                "r16_mape": None, "r16_nrmse": None})
            continue
        if not pd.isnull(rowType) and rowType.startswith('agg_'):
            aggColName = rowType[len('agg_'):]
            matches = normalPntsConfDf[
                normalPntsConfDf[aggColName] == confRow[aggColName]]
            avcPnt = ','.join(matches['avc_pnt'].tolist())
            actPnt = ','.join(matches['actual_pnt'].tolist())
            r0Pnt = ','.join(matches['r0_pnt'].tolist())
            r16Pnt = ','.join(matches['r16_pnt'].tolist())
        else:
            avcPnt = confRow['avc_pnt']
            actPnt = confRow['actual_pnt']
            r0Pnt = confRow['r0_pnt']
            r16Pnt = confRow['r16_pnt']
        # pull the four series used by the error metrics
        avcVals = getRemcPntData(FCA_FORECAST_VS_ACTUAL_STORE_NAME,
                                 avcPnt).tolist()
        r0Vals = getRemcPntData(FCA_FORECAST_VS_ACTUAL_STORE_NAME,
                                r0Pnt).tolist()
        r16Vals = getRemcPntData(FCA_FORECAST_VS_ACTUAL_STORE_NAME,
                                 r16Pnt).tolist()
        actVals = getRemcPntData(FCA_FORECAST_VS_ACTUAL_STORE_NAME,
                                 actPnt).tolist()
        resValsList.append({
            "name": confRow['name'],
            "r0_mape": calcMapePerc(actVals, r0Vals, avcVals),
            "r0_nrmse": calcNrmsePerc(actVals, r0Vals, avcVals),
            "r16_mape": calcMapePerc(actVals, r16Vals, avcVals),
            "r16_nrmse": calcNrmsePerc(actVals, r16Vals, avcVals)
        })
    return pd.DataFrame(resValsList)
def _calcR0ErrPercs(conf):
    """Return (mapePerc, nrmsePerc) of R0 forecast vs actual for one
    config row (needs 'actual_pnt', 'r0_pnt' and 'avc_pnt' columns)."""
    actVals = getRemcPntData(FCA_FORECAST_VS_ACTUAL_STORE_NAME,
                             conf['actual_pnt'])
    r0Vals = getRemcPntData(FCA_FORECAST_VS_ACTUAL_STORE_NAME,
                            conf['r0_pnt'])
    avcVals = getRemcPntData(FCA_FORECAST_VS_ACTUAL_STORE_NAME,
                             conf['avc_pnt'])
    return (calcMapePerc(actVals, r0Vals, avcVals),
            calcNrmsePerc(actVals, r0Vals, avcVals))


def getRemcRegionalR0ErrSectionDataDf(configFilePath, configSheetName):
    """Build the REMC regional R0 error summary dataframe.

    Expected config columns: name, r0_pnt, actual_pnt, cuf_pnt, avc_pnt,
    type. Dummy config rows are copied through as all-None result rows;
    then one MAPE row and one NRMSE row are appended, each holding the
    regional and ISTS solar / wind / combined R0 error percentages.
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    for col in ('name', 'r0_pnt', 'actual_pnt', 'avc_pnt', 'type'):
        confDf[col] = confDf[col].str.strip()
    resValsList = []
    # pass dummy config rows through unchanged (placeholders in output)
    for _, confRow in confDf.iterrows():
        if confRow['type'] == 'dummy':
            resValsList.append({"name": confRow['name'], "solar": None,
                                "wind": None, "combined": None})
    # compute (mape, nrmse) once per named config row via the helper
    # (replaces the previous 6x copy-pasted fetch/metric code)
    errPercs = {
        name: _calcR0ErrPercs(confDf[confDf['name'] == name].squeeze())
        for name in ('solar', 'wind', 'combined',
                     'ists_solar', 'ists_wind', 'ists_combined')
    }
    printWithTs(
        'Processing REMC Regional R0 Error summary Section at row {0}'.format(
            len(resValsList) + 1))
    # create the two result rows (MAPE row, then NRMSE row)
    resValsList.extend([{
        "name": "MAPE",
        "solar": errPercs['solar'][0],
        "wind": errPercs['wind'][0],
        "combined": errPercs['combined'][0],
        "istsSolar": errPercs['ists_solar'][0],
        "istsWind": errPercs['ists_wind'][0],
        "istsCombined": errPercs['ists_combined'][0]
    }, {
        "name": "NRMSE",
        "solar": errPercs['solar'][1],
        "wind": errPercs['wind'][1],
        "combined": errPercs['combined'][1],
        "istsSolar": errPercs['ists_solar'][1],
        "istsWind": errPercs['ists_wind'][1],
        "istsCombined": errPercs['ists_combined'][1]
    }])
    return pd.DataFrame(resValsList)
def getRegProfSectionDataDf(configFilePath, configSheetName):
    """Build the regional profile section dataframe.

    Expected config columns:
    name, installed_capacity, avc_point, actual_point, sch_point, type.
    Row type may be dummy (emits an all-None row) or normal.
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    confDf['name'] = confDf['name'].str.strip()
    confDf['type'] = confDf['type'].str.strip()
    resValsList = []
    # the time axis is the same for every row - fetch it once, not per row
    timeValSeries = getPntData('HRS')
    for rowIter in range(confDf.shape[0]):
        printWithTs(
            'regional profile processing row number {0}'.format(rowIter + 2))
        confRow = confDf.iloc[rowIter]
        rowType = confRow['type']
        if rowType == 'dummy':
            # dummy rows are placeholders - emit an all-None result row
            resValsList.append({
                "name": confRow['name'], "installed_capacity": None,
                "max_avc": None, "day_max_actual": None,
                "day_max_actual_time": None, "day_min_actual": None,
                "day_min_actual_time": None, "sch_mu": None,
                "act_mu": None, "dev_mu": None, "cuf": None
            })
            continue
        actPnt = confRow['actual_point']
        avcPnt = confRow['avc_point']
        if (avcPnt == '') or pd.isnull(avcPnt):
            # no AVC point configured for this row
            maxAvc = None
        else:
            maxAvc = getRemcPntData(FCA_FORECAST_VS_ACTUAL_STORE_NAME,
                                    avcPnt).max()
        # fetch the actual series once instead of once per statistic
        actSeries = getPntData(actPnt)
        dayMaxActual = actSeries.max()
        dayMaxActualTime = timeValSeries.iloc[actSeries.idxmax()]
        dayMinActual = actSeries.min()
        dayMinActualTime = timeValSeries.iloc[actSeries.idxmin()]
        # mean MW * 0.024 -> day energy in MU (million units)
        schMu = getPntData(confRow['sch_point']).mean() * 0.024
        actMu = actSeries.mean() * 0.024
        devMu = actMu - schMu
        installedCapacity = confRow['installed_capacity']
        # CUF % = actual energy / (24h * installed capacity), scaled
        cufPerc = (actMu * 100000) / (24 * installedCapacity)
        resValsList.append({
            "name": confRow['name'],
            "installed_capacity": installedCapacity,
            "max_avc": maxAvc,
            "day_max_actual": dayMaxActual,
            "day_max_actual_time": dayMaxActualTime,
            "day_min_actual": dayMinActual,
            "day_min_actual_time": dayMinActualTime,
            "sch_mu": schMu,
            "act_mu": actMu,
            "dev_mu": devMu,
            "cuf": cufPerc
        })
    return pd.DataFrame(resValsList)
def getVoltProfSectionDataDf(configFilePath, configSheetName):
    """Build the voltage profile section dataframe.

    Expected config columns: name, 400_kv_pnt, 220_kv_pnt, type. Dummy
    rows emit all-None rows; a missing point id leaves the corresponding
    max/min/avg cells as None.
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    resValsList = []
    for rowIter in range(confDf.shape[0]):
        confRow = confDf.iloc[rowIter]
        printWithTs(
            'voltage profile processing row number {0}'.format(rowIter + 2))
        if confRow['type'] == 'dummy':
            # dummy rows are placeholders - emit an all-None result row
            resValsList.append({
                "name": confRow['name'],
                "400_max": None, "400_min": None, "400_avg": None,
                "220_max": None, "220_min": None, "220_avg": None
            })
            continue
        max400 = min400 = avg400 = None
        max220 = min220 = avg220 = None
        pnt400 = confRow['400_kv_pnt']
        if not pd.isnull(pnt400):
            # fetch the 400 kV series once for all three statistics
            # (original fetched it three times)
            vals400 = getPntData(pnt400)
            max400 = vals400.max()
            min400 = vals400.min()
            avg400 = vals400.mean()
        pnt220 = confRow['220_kv_pnt']
        if not pd.isnull(pnt220):
            # fetch the 220 kV series once for all three statistics
            vals220 = getPntData(pnt220)
            max220 = vals220.max()
            min220 = vals220.min()
            avg220 = vals220.mean()
        resValsList.append({
            "name": confRow['name'],
            "400_max": max400, "400_min": min400, "400_avg": avg400,
            "220_max": max220, "220_min": min220, "220_avg": avg220
        })
    return pd.DataFrame(resValsList)
def getIstsGenSectionDataDf(configFilePath, configSheetName):
    """Build the ISTS generation section dataframe.

    Expected config columns:
    name, installed_capacity, avc_id, act_id, sch_id, type,
    pooling_station, gen_type.
    Row type may be dummy / normal / agg_pool / agg_gen_type; agg_* rows
    aggregate the points of all normal rows sharing the aggregation
    column value.
    """
    confDf = pd.read_excel(configFilePath, sheet_name=configSheetName)
    for col in ('name', 'type', 'pooling_station', 'gen_type'):
        confDf[col] = confDf[col].str.strip()
    # normal rows feed the agg_* aggregations
    normalPntsConfDf = confDf[(confDf['type'] == 'normal') |
                              (confDf['type'] == '') |
                              (confDf['type'].isnull())]
    resValsList = []
    # the time axis is identical for every row - fetch it once instead of
    # once per iteration as before
    timeValSeries = getPntData('HRS')
    for rowIter in range(confDf.shape[0]):
        confRow = confDf.iloc[rowIter]
        printWithTs('ists gen processing row number {0}'.format(rowIter + 2))
        rowType = confRow['type']
        if rowType == 'dummy':
            # dummy rows are placeholders - emit an all-None result row
            resValsList.append({
                "name": confRow['name'], "installed_capacity": None,
                "max_avc": None, "day_max_actual": None,
                "day_max_actual_time": None, "day_min_actual": None,
                "day_min_actual_time": None, "sch_mu": None,
                "act_mu": None, "dev_mu": None, "cuf": None
            })
            continue
        elif not pd.isnull(rowType) and rowType.startswith('agg_'):
            aggColName = rowType[len('agg_'):]
            aggIdentifier = confRow[aggColName]
            confDfForAgg = normalPntsConfDf[
                normalPntsConfDf[aggColName] == aggIdentifier]
            # NOTE(review): only avc_id drops NaNs before joining; act_id
            # and sch_id would emit 'nan' tokens if ever blank in the
            # config sheet - confirm whether they can be blank
            avcPnt = ','.join(confDfForAgg['avc_id'].dropna().tolist())
            actPnt = ','.join(confDfForAgg['act_id'].tolist())
            schPnt = ','.join(confDfForAgg['sch_id'].tolist())
        else:
            avcPnt = confRow['avc_id']
            actPnt = confRow['act_id']
            schPnt = confRow['sch_id']
        if (avcPnt == '') or pd.isnull(avcPnt):
            # no AVC point configured for this row
            maxAvc = None
        else:
            maxAvc = getRemcPntData(FCA_FORECAST_VS_ACTUAL_STORE_NAME,
                                    avcPnt).max()
        # fetch the actual series once instead of once per statistic
        actSeries = getPntData(actPnt)
        dayMaxActual = actSeries.max()
        dayMaxActualTime = timeValSeries.iloc[actSeries.idxmax()]
        dayMinActual = actSeries.min()
        dayMinActualTime = timeValSeries.iloc[actSeries.idxmin()]
        # mean MW * 0.024 -> day energy in MU (million units)
        schMu = getPntData(schPnt).mean() * 0.024
        actMu = actSeries.mean() * 0.024
        devMu = actMu - schMu
        installedCapacity = confRow['installed_capacity']
        # CUF % = actual energy / (24h * installed capacity), scaled
        cufPerc = (actMu * 100000) / (24 * installedCapacity)
        resValsList.append({
            "name": confRow['name'],
            "installed_capacity": installedCapacity,
            "max_avc": maxAvc,
            "day_max_actual": dayMaxActual,
            "day_max_actual_time": dayMaxActualTime,
            "day_min_actual": dayMinActual,
            "day_min_actual_time": dayMinActualTime,
            "sch_mu": schMu,
            "act_mu": actMu,
            "dev_mu": devMu,
            "cuf": cufPerc
        })
    return pd.DataFrame(resValsList)
from report_generators.remc_state_da_section import populateRemcStateDaSummSectionData from report_generators.remc_graph_data_section import populateRemcGraphDataSectionData from data_fetchers import inp_ts_data_store from data_fetchers.remc_data_store import loadRemcDataStore, deleteRemcDataStore from data_fetchers.remc_data_store import FCA_FORECAST_VS_ACTUAL_STORE_NAME, FCA_DAY_AHEAD_STORE_NAME, FCA_FORECAST_VS_ACTUAL_PREV_STORE_NAME from data_fetchers.remc_data_store import IFT_FORECAST_VS_ACTUAL_STORE_NAME, IFT_DAY_AHEAD_STORE_NAME from data_fetchers.remc_data_store import ALEA_FORECAST_VS_ACTUAL_STORE_NAME, ALEA_DAY_AHEAD_STORE_NAME from data_fetchers.remc_data_store import RES_FORECAST_VS_ACTUAL_STORE_NAME, RES_DAY_AHEAD_STORE_NAME from data_fetchers.remc_data_store import ENER_FORECAST_VS_ACTUAL_STORE_NAME, ENER_DAY_AHEAD_STORE_NAME import datetime as dt import argparse from utils.printUtils import printWithTs from report_generators.paste_report_data import pasteDataToTemplateFile from report_generators.nldc_report_generator import generateNldcReport, transferNldcRepToSftpLocation printWithTs('imports complete...', clr='green') # %% printWithTs('loading SCADA Total ISTS Gen data...', clr='magenta') # initialize timeseries datastore inp_ts_data_store.loadGenTsData() # x = inp_ts_data_store.tsDataDf printWithTs('done loading SCADA Total ISTS Gen data store...', clr='green') # %% printWithTs('started loading REMC FCA Forecast Vs Actual data store...', clr='magenta') # initialize REMC FCA forecast Vs actual timeseries datastore loadRemcDataStore(FCA_FORECAST_VS_ACTUAL_STORE_NAME) # x = inp_ts_data_store.tsDataDf printWithTs('REMC FCA Forecast Vs Actual data store loading complete...',
def pasteDataToTemplateFile(dataFile, templateFile):
    """Copy every cell of every sheet of dataFile into the sheet of the
    same name in templateFile, then save templateFile in place.

    Sheets missing from the template are skipped, as are data rows whose
    first-column value is the sentinel 'dummy**'.
    """
    printWithTs('loading data file data into a dataframe', clr='magenta')
    # open the source workbook read-only (faster, lower memory)
    dataWb = load_workbook(dataFile, read_only=True)
    dataFileSheetNames = list(dataWb.sheetnames)
    printWithTs('loading template file', clr='magenta')
    templWb = load_workbook(templateFile)
    templFileSheetNames = list(templWb.sheetnames)
    printWithTs('started pasting data to template', clr='magenta')
    # iterate through each sheet of the data file for copying
    for dataShtName in dataFileSheetNames:
        printWithTs('pasting data sheet {0}'.format(dataShtName))
        # skip sheets of the data file absent from the template
        if dataShtName not in templFileSheetNames:
            printWithTs('skipping sheet paste as not present in template file')
            continue
        dataSht = dataWb[dataShtName]
        # hoist the template sheet lookup out of the per-cell loop
        templSht = templWb[dataShtName]
        maxRows = dataSht.max_row
        maxCols = dataSht.max_column
        printWithTs('maxRows = {0}, maxCols = {1}'.format(maxRows, maxCols))
        for rowIter in range(1, maxRows + 1):
            # 'dummy**' in the first column marks a row to leave untouched
            if dataSht.cell(row=rowIter, column=1).value == 'dummy**':
                continue
            # copy cell values from data sheet to template sheet
            for colIter in range(1, maxCols + 1):
                printWithTs('pasting row={0}, col={1}'.format(
                    rowIter, colIter))
                cellVal = dataSht.cell(row=rowIter, column=colIter).value
                templSht.cell(row=rowIter, column=colIter).value = cellVal
    printWithTs('done pasting data to template', clr='green')
    # save the destination excel file
    printWithTs('Saving report template after pasting')
    templWb.save(str(templateFile))
    dataWb.close()