def file_treatment(id, dfjson, dispatching_info: str):
    """Process one uploaded file end-to-end and record its status in the DB.

    Marks the file row "in progress", deserialises the JSON-encoded frame,
    runs identification/normalisation/standardisation, persists both the
    normalised and standardised frames, then marks the row "done" with
    timing and energy totals.  On any failure the row is marked "error".

    Parameters:
        id: primary key of the row in the `files` table (`id_f`).
            NOTE(review): shadows the builtin `id`; kept for caller
            compatibility.
        dfjson: the input DataFrame serialised with orient='table'.
        dispatching_info: forwarded verbatim to the identification step.

    Returns:
        None.  All results are written to the database.
    """
    start_date = datetime.datetime.now()
    conn = cur = None
    try:
        conn, cur = ConnexionDB()
        engine = make_engine()
        Execute_SQL(cur, td.update_files_in_progress, {'id_f': id})
        # convert_dates/convert_axes disabled: the pipeline does its own
        # date parsing during normalisation.
        df = pd.read_json(dfjson, typ='frame', orient='table',
                          convert_dates=False, convert_axes=False)
        Commit(conn)
        (file_type, identification_duration, preparation_duration,
         normalisation_duration, standardisation_duration,
         dataframe, df_result) = \
            identification.identification_normalisation_standardisation(
                df, dispatching_info, start_date, "web")
        # Tag both result frames with the file id before persisting them.
        df_result['id_f'] = id
        df_result.to_sql('result', con=engine, index=False,
                         if_exists='append')
        dataframe['id_f'] = id
        dataframe.to_sql('normalisation', con=engine, index=False,
                         if_exists='append')
        # Guard the empty-frame case: kwh_on_normalize_df cannot be asked
        # for a yearly total over zero rows.
        if len(dataframe) == 0:
            kwh_one_year_normal = 0
        else:
            kwh_one_year_normal = round(vd.kwh_on_normalize_df(dataframe), 2)
        kwh_one_year_standard = round(df_result['kwh'].sum(), 2)
        Execute_SQL(
            cur, td.update_files_done, {
                'id_f': id,
                "template": file_type,
                'number_line': len(dataframe),
                "normalisation_duration":
                    identification_duration + preparation_duration
                    + normalisation_duration,
                "standardisation_duration": standardisation_duration,
                "kwh_one_year_normal": kwh_one_year_normal,
                "kwh_one_year_standard": kwh_one_year_standard
            })
        Commit(conn)
        DeconnexionDB(conn, cur)
    except Exception as error:
        print(error)
        # Fix: release the connection from the failed attempt (the original
        # code opened a second connection and leaked this one).
        if conn is not None:
            try:
                DeconnexionDB(conn, cur)
            except Exception as close_error:
                print(close_error)
        # Use a fresh connection to flag the file as errored: the original
        # connection/cursor may be the thing that failed.
        try:
            conn, cur = ConnexionDB()
            Execute_SQL(cur, td.update_files_error, {'id_f': id})
            Commit(conn)
            DeconnexionDB(conn, cur)
        except Exception as mark_error:
            # Best effort only: if even the error-marking fails, report it
            # rather than raising out of the error handler.
            print(mark_error)
def test_template_1():
    """Template 1 fixture: check first/last timestamps and energy balance.

    Runs the full pipeline on Template1.csv and verifies that the
    normalised frame covers the expected 2016 range and that the
    standardised yearly kWh total matches the normalised one.
    """
    csv_path = os.path.join(os.getcwd(), 'files_brut', 'Template1.csv')
    (file_type, identification, preparation, normalisation,
     standardisation, dataframe, df_result) = ident.iden_norm_stand(
        csv_path, option)
    expected_first = datetime.datetime(2016, 1, 1, 0, 55, 0)
    expected_last = datetime.datetime(2016, 12, 31, 23, 55, 0)
    assert dataframe['date_time'][0] == expected_first
    assert dataframe['date_time'][len(dataframe) - 1] == expected_last
    normalised_total = round(valid.kwh_on_normalize_df(dataframe), 2)
    standardised_total = round(df_result['kwh'].sum(), 2)
    assert standardised_total == normalised_total
def test_template_2():
    """Template 2 fixture: check first/last timestamps and energy balance.

    BUG FIX: removed a leftover debug line that overwrote `filename` with a
    hard-coded absolute path ("/home/zahra/...") from one developer's
    machine, which made the test fail everywhere else.  The portable
    cwd-relative path is now used, matching test_template_1.
    """
    filename = os.path.join(os.getcwd(), 'files_brut', 'Template2.csv')
    (file_type, identification, preparation, normalisation,
     standardisation, dataframe, df_result) = ident.iden_norm_stand(
        filename, option)
    assert dataframe['date_time'][0] == datetime.datetime(
        year=2018, month=1, day=20, hour=0, minute=0, second=0)
    assert dataframe['date_time'][len(dataframe) - 1] == datetime.datetime(
        year=2020, month=1, day=19, hour=23, minute=50, second=0)
    assert round(df_result['kwh'].sum(), 2) == round(
        valid.kwh_on_normalize_df(dataframe), 2)
file_type, identification, preparation, normalisation, standardisation, dataframe, df_result = iden_norm_stand( filepath, "standalone") Path(os.path.join(os.getcwd(), "result")).mkdir(parents=True, exist_ok=True) stamp = "_" + datetime.datetime.now().strftime("%d-%m-%Y_%H-%M-%S") filename_result_normalisation = os.path.join( os.getcwd(), "result", "result_normalisation_" + filepath.stem + stamp + ".xlsx") filename_result = os.path.join( os.getcwd(), "result", "result_" + filepath.stem + stamp + ".xlsx") dataframe.to_excel(filename_result_normalisation) df_result.to_excel(filename_result) if len(dataframe) == 0: kwh_one_year_normal = 0 else: kwh_one_year_normal = round(vd.kwh_on_normalize_df(dataframe), 2) kwh_one_year_standard = round(df_result['kwh'].sum(), 2) if kwh_one_year_normal == 0: ppb = "Normalisation incorrecte" else: ppb = abs( int( round( 1000000000 * (1 - kwh_one_year_standard / kwh_one_year_normal), 0))) print("Fichier : ", sys.argv[1]) print("Template : ", file_type) print("Normalisation :", identification + preparation + normalisation) print("Standardisation :", standardisation) print("Part Per Billion :", ppb) except Exception as error: