def my_test():
    # Clear the weekly staging folders, then rebuild the train/predict week data.
    compress.empty_folder(os.path.join(setting.data_path, 'trainWeek'))
    compress.empty_folder(os.path.join(setting.data_path, 'predictWeek'))
    train_week = get_train_week()
    pre_week = get_pre_week()
    # Merge each week list on a small two-worker thread pool.
    train_pool = potThread.PotThread(2)
    train_pool.run_pot_threads(merge_train_week, train_week)
    pre_pool = potThread.PotThread(2)
    pre_pool.run_pot_threads(merge_pre_week, pre_week)
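potThread.PotThread and run_pot_threads are project-internal helpers that are not shown here; assuming PotThread(2) is a two-worker pool whose run_pot_threads maps a merge function over the week list, a rough standard-library sketch of the same pattern would be:

from concurrent.futures import ThreadPoolExecutor

def run_with_pool(func, items, workers=2):
    # Hypothetical stand-in for potThread.PotThread(2).run_pot_threads(func, items):
    # apply func to every item on a small thread pool and wait for completion.
    with ThreadPoolExecutor(max_workers=workers) as pool:
        list(pool.map(func, items))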
Example #2
def test():
    # Same flow as the weekly extraction above, but at month granularity.
    compress.empty_folder(os.path.join(setting.data_path, 'trainMonth'))
    compress.empty_folder(os.path.join(setting.data_path, 'predictMonth'))
    train_month = get_train_month()
    print(train_month)
    pre_month = get_pre_month()
    print(pre_month)
    train_pool = potThread.PotThread(2)
    train_pool.run_pot_threads(merge_train_month, train_month)
    pre_pool = potThread.PotThread(2)
    pre_pool.run_pot_threads(merge_pre_month, pre_month)
Example #3
def build_feature(df):
    start_day = week_extract.get_min_month()
    # Training window: the first two months of data.
    df_train = df[(df['date'] >= start_day)
                  & (df['date'] < timeOpt.add_months(start_day, 2))]
    # Prediction window: the same-width window shifted forward by one month.
    df_pre = df[(df['date'] >= timeOpt.add_months(start_day, 1))
                & (df['date'] < timeOpt.add_months(start_day, 3))]
    # ESNs still seen in the third month, passed to build() for churn labelling.
    df_for_churn = set(df[(df['date'] >= timeOpt.add_months(start_day, 2))
                          & (df['date'] < timeOpt.add_months(start_day, 3))]
                       ['esn'].astype('str').values)
    compress.empty_folder(setting.model_path)
    train_result_df = build(df_train, df_for_churn, TRAIN)
    train_result_df.to_csv(os.path.join(setting.model_path, "trainData.csv"),
                           index=False)
    pre_result_df = build(df_pre, df_for_churn, PREDICT)
    # Only rows not already flagged as churned are written out for prediction.
    pre_result_df[pre_result_df['churnLabel'] < 1].to_csv(
        os.path.join(setting.model_path, "predictData.csv"), index=False)
    return 0
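The three date slices above overlap by design: the training frame covers the first two months, the prediction frame is the same-width window shifted forward by one month, and the churn set holds the ESNs still seen in the third month. A self-contained illustration of that windowing, using pd.DateOffset as a stand-in for timeOpt.add_months:

import pandas as pd

# Toy frame with one row per month to make the window boundaries visible.
df = pd.DataFrame({'date': pd.date_range('2023-01-01', periods=4, freq='MS'),
                   'esn': ['a', 'b', 'c', 'd']})
start_day = df['date'].min()

def add_months(day, n):
    # Stand-in for timeOpt.add_months.
    return day + pd.DateOffset(months=n)

df_train = df[(df['date'] >= start_day) & (df['date'] < add_months(start_day, 2))]
df_pre = df[(df['date'] >= add_months(start_day, 1)) & (df['date'] < add_months(start_day, 3))]
churn_set = set(df[(df['date'] >= add_months(start_day, 2))
                   & (df['date'] < add_months(start_day, 3))]['esn'].astype(str))

print(df_train['date'].dt.month.tolist())  # [1, 2] -> first two months
print(df_pre['date'].dt.month.tolist())    # [2, 3] -> shifted by one month
print(churn_set)                           # {'c'}  -> still active in month 3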
def get_xgboost_predict_result():
    pot_xgboost = PotXGBoost()
    data = pd.read_csv(os.path.join(setting.model_path, 'trainData.csv'),
                       error_bad_lines=False,
                       index_col=False)
    pre = pd.read_csv(os.path.join(setting.model_path, 'predictData.csv'),
                      error_bad_lines=False,
                      index_col=False)
    # Keep only the configured feature columns.
    data = data[setting.parameter_json["train_pre_columns"]]
    pre = pre[setting.parameter_json["train_pre_columns"]]
    # Drop the RSRP/SINR radio columns unless the parameter enables them.
    is_use_rsrp = setting.load_parameter()['use_rsrp_sinr']
    if is_use_rsrp.lower() != 'true':
        data = data.drop(setting.rsrp_sinr_columns, axis=1)
        pre = pre.drop(setting.rsrp_sinr_columns, axis=1)
    pot_xgboost.train(data)
    pot_xgboost.predict(pre)
    # Write the prediction results and feature importances into a fresh result folder.
    compress.empty_folder(setting.result_path)
    pot_xgboost.pre_result.to_csv(
        os.path.join(setting.result_path, 'predict_result.csv'))
    importance_df = json_to_df(pot_xgboost.features_importance)
    importance_df.to_csv(
        os.path.join(setting.result_path, 'features_importance.csv'))
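Note that read_csv's error_bad_lines argument was deprecated in pandas 1.3 and removed in pandas 2.0, where on_bad_lines='skip' is the equivalent. A small, hypothetical version-tolerant reader could look like:

import pandas as pd

def read_csv_skip_bad(path):
    # Prefer the modern on_bad_lines API; fall back to the pre-1.3 keyword on old pandas.
    try:
        return pd.read_csv(path, on_bad_lines='skip', index_col=False)
    except TypeError:
        return pd.read_csv(path, error_bad_lines=False, index_col=False)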
Example #5
def cpe_analysis(self, status):
    try:
        uiThread.UiThread(self.check_health)
        if status[0] == 3:
            self.import_cpe_thread()
            self.xg_pre()
        elif status[0] == 2:
            compress.empty_folder(setting.post_eva_path)
            self.import_with_suite_thread()
            self.xg_pre()
            post_evaluate.main(True)
        compress.compress_result()
        # Re-enable the UI buttons once the analysis has finished.
        self.analysis_button['state'] = NORMAL
        self.export_button['state'] = NORMAL
        self.show_log(timeOpt.get_time() +
                      ": click Export button to export the data.")
        self.THREAD_FLAG = False
    except Exception as e:
        # On failure, reset the thread flag, log the error and re-enable the buttons.
        self.THREAD_FLAG = False
        self.show_log(timeOpt.get_time() + ": Analysis failed!\n" + str(e))
        tk.messagebox.showerror("ERROR",
                                message='Analysis failed!\n' + str(e))
        self.analysis_button['state'] = NORMAL
        self.export_button['state'] = NORMAL
def day_extract():
    compress.empty_folder(os.path.join(setting.data_path, 'day'))
    day_dict = get_day_df_dict()
    merge_day_data(day_dict)
def get_extract_data():
    # Clear the extractData staging folder, then list every CSV under the unzipped CPE data.
    compress.empty_folder(os.path.join(setting.data_path, 'extractData'))
    all_file = compress.get_all_csv_file(compress.cpe_unzip_path)
    return all_file
Example #8
def copy_data(self, source, path):
    # Clear the destination folder, then copy the source data into it.
    compress.empty_folder(path)
    self.copy_file(source, path)