コード例 #1
0
ファイル: train_team.py プロジェクト: Naoki1101/kaggle-riiid
def main():
    """Train a model on the team's pre-split Riiid data and publish results.

    Relies on module-level globals (``cfg``, ``dh``, ``features``,
    ``features_params``, ``logger_path``, ``run_name``, ``comment``,
    ``notify_params``, ``const``, ``factory``) set up elsewhere in the file.
    """
    t = Timer()
    seed_everything(cfg.common.seed)

    # Route all logging for this run into the run-specific directory.
    logger_path.mkdir(exist_ok=True)
    logging.basicConfig(filename=logger_path / 'train.log',
                        level=logging.DEBUG)

    # Snapshot config and feature list next to the logs for reproducibility.
    dh.save(logger_path / 'config.yml', cfg)
    dh.save(logger_path / 'features.yml', features_params)

    with t.timer('load data'):
        train_x = dh.load('../data/team/X_tra_wo_lec_20M.feather')
        val_x = dh.load('../data/team/X_val_wo_lec.feather')

        # Mark validation rows so they can be identified after the concat.
        train_x['is_val'] = 0
        val_x['is_val'] = 1

        train_x = pd.concat([train_x, val_x],
                            axis=0,
                            sort=False,
                            ignore_index=True)
        train_y = train_x[const.TARGET_COLS]

        use_row_id = train_x['row_id'].values
        val_idx = train_x[train_x['is_val'] == 1].index
        # Keep only the configured feature columns (this also removes the
        # helper columns such as 'is_val' and 'row_id').
        drop_cols = set(train_x.columns) - set(features)
        train_x = train_x.drop(drop_cols, axis=1)

    with t.timer('load additional features'):
        add_df = pd.DataFrame(index=train_x.index)

        # Features requested in the config but absent from the base frame
        # are loaded one-by-one from pre-computed feather files.
        additional_cols = set(features) - set(train_x.columns)
        for col in additional_cols:
            feat_df = pd.read_feather(f'../features/{col}_train.feather')
            # NOTE(review): assumes the feature feather's index matches the
            # 'row_id' values of train.csv — confirm against the builder.
            add_df[col] = feat_df.loc[use_row_id, col].values

        add_df = reduce_mem_usage(add_df)

        train_x = pd.concat([train_x, add_df], axis=1)

    with t.timer('preprocessing'):
        pass

    with t.timer('make folds'):
        # Single hold-out "fold": fold_0 == 1 flags the validation rows.
        fold_df = pd.DataFrame(index=range(len(train_x)))
        fold_df['fold_0'] = 0
        fold_df.loc[val_idx, 'fold_0'] += 1

    with t.timer('drop index'):
        if cfg.common.drop is not None:
            # Drop the same rows from features, targets and fold flags so
            # the three frames stay positionally aligned.
            drop_idx = factory.get_drop_idx(cfg.common.drop)
            train_x = train_x.drop(drop_idx, axis=0).reset_index(drop=True)
            train_y = train_y.drop(drop_idx, axis=0).reset_index(drop=True)
            fold_df = fold_df.drop(drop_idx, axis=0).reset_index(drop=True)

    with t.timer('train and predict'):
        trainer = Trainer(cfg)
        cv = trainer.train(train_df=train_x,
                           target_df=train_y,
                           fold_df=fold_df)
        trainer.save(run_name)

        # Rename the log directory so the CV score is visible at a glance.
        run_name_cv = f'{run_name}_{cv:.3f}'
        logger_path.rename(f'../logs/{run_name_cv}')
        logging.disable(logging.FATAL)

    with t.timer('kaggle api'):
        kaggle = Kaggle(cfg, run_name_cv)
        if cfg.common.kaggle.data:
            kaggle.create_dataset()
        if cfg.common.kaggle.notebook:
            kaggle.push_notebook()

    with t.timer('notify'):
        process_minutes = t.get_processing_time()
        notificator = Notificator(run_name=run_name_cv,
                                  model_name=cfg.model.name,
                                  cv=round(cv, 4),
                                  process_time=round(process_minutes, 2),
                                  comment=comment,
                                  params=notify_params)
        notificator.send_line()
        notificator.send_notion()
        # notificator.send_slack()

    with t.timer('git'):
        # Record the exact code state that produced this run.
        git = Git(run_name=run_name_cv)
        git.push()
        git.save_hash()
コード例 #2
0
def main():
    """Train a model on factory-built features with a fixed validation split.

    Relies on module-level globals (``cfg``, ``dh``, ``factory``,
    ``features``, ``features_params``, ``logger_path``, ``run_name``,
    ``comment``, ``notify_params``) configured elsewhere in the file.
    """
    t = Timer()
    seed_everything(cfg.common.seed)

    # Route all logging for this run into the run-specific directory.
    logger_path.mkdir(exist_ok=True)
    logging.basicConfig(filename=logger_path / 'train.log',
                        level=logging.DEBUG)

    # Snapshot config and feature list next to the logs for reproducibility.
    dh.save(logger_path / 'config.yml', cfg)
    dh.save(logger_path / 'features.yml', features_params)

    with t.timer('load data'):
        train_x = factory.get_features(features, cfg.data.loader.train)
        train_y = factory.get_target(cfg.data.target)

    # with t.timer('add oof'):
    #     if cfg.data.features.oof.name is not None:
    #         oof, preds = factory.get_result(cfg.data.features.oof.name, cfg)

    #         for i in range(oof.shape[1]):
    #             oof_col_name = f'oof_{const.TARGET_COLS[i]}'
    #             train_x[oof_col_name] = oof[:, i]
    #             features.append(oof_col_name)

    with t.timer('make folds'):
        # Single hold-out "fold": rows listed in the saved index array are
        # flagged with fold_0 == 1 and used for validation.
        valid_idx = np.load('../data/processed/cv1_valid.npy')
        # valid_idx = np.load('../data/processed/cv1_valid_dropped.npy')

        fold_df = pd.DataFrame(index=range(len(train_x)))
        fold_df['fold_0'] = 0
        fold_df.loc[valid_idx, 'fold_0'] += 1

    with t.timer('drop index'):
        if cfg.common.drop is not None:
            # Drop the same rows from features, targets and fold flags so
            # the three frames stay positionally aligned.
            drop_idx = factory.get_drop_idx(cfg.common.drop)
            train_x = train_x.drop(drop_idx, axis=0).reset_index(drop=True)
            train_y = train_y.drop(drop_idx, axis=0).reset_index(drop=True)
            fold_df = fold_df.drop(drop_idx, axis=0).reset_index(drop=True)

        if cfg.data.sampling:
            # Down-sample the training fold by dropping 20M random rows.
            # NOTE(review): np.random.choice defaults to replace=True, so
            # drop_rows can contain duplicates and fewer than 20M unique
            # rows are actually dropped — confirm replace=False was not
            # intended.
            drop_rows = np.random.choice(
                fold_df[fold_df['fold_0'] == 0].index.values, 20_000_000)
            train_x = train_x.drop(drop_rows, axis=0).reset_index(drop=True)
            train_y = train_y.drop(drop_rows, axis=0).reset_index(drop=True)
            fold_df = fold_df.drop(drop_rows, axis=0).reset_index(drop=True)

    with t.timer('train and predict'):
        trainer = Trainer(cfg)
        cv = trainer.train(train_df=train_x,
                           target_df=train_y,
                           fold_df=fold_df)
        trainer.save(run_name)

        # Rename the log directory so the CV score is visible at a glance.
        run_name_cv = f'{run_name}_{cv:.3f}'
        logger_path.rename(f'../logs/{run_name_cv}')
        logging.disable(logging.FATAL)

    with t.timer('kaggle api'):
        kaggle = Kaggle(cfg, run_name_cv)
        if cfg.common.kaggle.data:
            kaggle.create_dataset()
        if cfg.common.kaggle.notebook:
            kaggle.push_notebook()

    with t.timer('notify'):
        process_minutes = t.get_processing_time()
        notificator = Notificator(run_name=run_name_cv,
                                  model_name=cfg.model.name,
                                  cv=round(cv, 4),
                                  process_time=round(process_minutes, 2),
                                  comment=comment,
                                  params=notify_params)
        notificator.send_line()
        notificator.send_notion()
        # notificator.send_slack()

    with t.timer('git'):
        # Record the exact code state that produced this run.
        git = Git(run_name=run_name_cv)
        git.push()
        git.save_hash()
コード例 #3
0
def main():
    """Train an ordinal-regression model on PANDA image data and notify.

    Builds a 6-column cumulative ("ordinal") target from ``isup_grade``,
    makes a stratified 80/20 split, trains, then reports via the Kaggle
    API, LINE and Notion.  Relies on module-level globals (``cfg``,
    ``dh``, ``logger_path``, ``run_name``, ``model_name``, ``now``,
    ``comment``, ``notify_params``) configured elsewhere in the file.
    """
    t = Timer()
    seed_everything(cfg.common.seed)

    # Route all logging for this run into the run-specific directory.
    logger_path.mkdir(exist_ok=True)
    logging.basicConfig(filename=logger_path / 'train.log',
                        level=logging.DEBUG)

    # Snapshot the config next to the logs for reproducibility.
    dh.save(logger_path / 'config.yml', cfg)

    with t.timer('load data'):
        root = Path(cfg.common.input_root)
        train_df = dh.load(root / cfg.common.img_file)

    with t.timer('create target dataframe'):
        # Cumulative encoding: target_i == 1 iff isup_grade >= i, i.e. a
        # grade g maps to g+1 leading ones followed by zeros.
        ordinal_target = np.zeros((len(train_df), 6))

        for idx in train_df.index:
            target = train_df.loc[idx, 'isup_grade']
            ordinal_target[idx, :] = [
                1 if target >= i else 0 for i in range(6)
            ]

        target_df = pd.DataFrame(ordinal_target,
                                 columns=[f'target_{i}' for i in range(6)])

    with t.timer('drop several rows'):
        if cfg.common.drop.name is not None:
            # Drop the same rows from features and targets so the frames
            # stay positionally aligned.
            drop_idx = dh.load(f'../pickle/{cfg.common.drop.name}.npy')
            train_df = train_df.drop(drop_idx, axis=0).reset_index(drop=True)
            target_df = target_df.drop(drop_idx, axis=0).reset_index(drop=True)

    with t.timer('make folds'):
        # Fix: removed the dead ``train_y_all`` computation and its
        # n_classes == 1 float cast — the split below trains on the
        # ordinal ``target_df``, so that label series was never used.
        train_x_all = train_df.drop('isup_grade', axis=1)
        trn_x, val_x, trn_y, val_y = train_test_split(
            train_x_all,
            target_df,
            test_size=0.2,
            shuffle=True,
            random_state=cfg.common.seed,
            stratify=train_df['isup_grade'])

    with t.timer('train model'):
        result = train_ordinal_reg(run_name, trn_x, val_x, trn_y, val_y, cfg)

    logging.disable(logging.FATAL)
    # Rename the log directory so the CV score is visible at a glance.
    run_name_cv = f'{run_name}_{result["cv"]:.3f}'
    logger_path.rename(f'../logs/{run_name_cv}')

    with t.timer('kaggle api'):
        kaggle = Kaggle(cfg.compe.compe_name, run_name_cv)
        if cfg.common.kaggle.data:
            kaggle.create_dataset()
        if cfg.common.kaggle.notebook:
            kaggle.push_notebook()

    with t.timer('notify'):
        process_minutes = t.get_processing_time()
        message = f'''{model_name}\ncv: {result["cv"]:.3f}\ntime: {process_minutes:.2f}[h]'''
        send_line(notify_params.line.token, message)

        notion = Notion(token=notify_params.notion.token_v2)
        notion.set_url(url=notify_params.notion.url)
        notion.insert_rows({
            'name': run_name_cv,
            'created': now,
            'model': cfg.model.name,
            'local_cv': round(result['cv'], 4),
            'time': process_minutes,
            'comment': comment
        })
コード例 #4
0
def main():
    """Train an NN on the Riiid train.csv with a few merged features.

    In debug mode only the first 5M rows (and matching feature slices /
    valid indices) are used.  Relies on module-level globals (``cfg``,
    ``const``, ``dh``, ``factory``, ``logger_path``, ``run_name``,
    ``comment``, ``notify_params``) configured elsewhere in the file.
    """
    t = Timer()
    seed_everything(cfg.common.seed)

    # Route all logging for this run into the run-specific directory.
    logger_path.mkdir(exist_ok=True)
    logging.basicConfig(filename=logger_path / 'train.log',
                        level=logging.DEBUG)

    # Snapshot the config next to the logs for reproducibility.
    dh.save(logger_path / 'config.yml', cfg)

    with t.timer('load data'):
        if cfg.common.debug:
            train_df = pd.read_csv(const.INPUT_DATA_DIR / 'train.csv',
                                   dtype=const.DTYPE,
                                   nrows=5_000_000)
        else:
            train_df = pd.read_csv(const.INPUT_DATA_DIR / 'train.csv',
                                   dtype=const.DTYPE)

    with t.timer('preprocess'):
        # Map each question (content_id) to its TOEIC part via questions.csv.
        questions_df = pd.read_csv(const.INPUT_DATA_DIR / 'questions.csv')
        q2p = dict(questions_df[['question_id', 'part']].values)
        train_df['part'] = train_df['content_id'].map(q2p)

        # Cast to float so missing values stay representable (NaN).
        train_df['prior_question_had_explanation'] = train_df[
            'prior_question_had_explanation'].astype(float)

        # Pre-computed target-encoding and per-user average features.
        te_content_df = pd.read_feather(
            '../features/te_content_id_by_answered_correctly_train.feather')
        avg_u_target_df = pd.read_feather(
            '../features/answered_correctly_avg_u_train.feather')

        if cfg.common.debug:
            te_content_df = te_content_df.iloc[:5_000_000]
            avg_u_target_df = avg_u_target_df.iloc[:5_000_000]

        # NOTE(review): these assignments rely on the feature feathers
        # being row-aligned with train.csv — confirm upstream.
        train_df['te_content_id_by_answered_correctly'] = te_content_df[
            'te_content_id_by_answered_correctly']
        train_df['answered_correctly_avg_u'] = avg_u_target_df[
            'answered_correctly_avg_u']

    with t.timer('make folds'):
        # Single hold-out "fold": fold_0 == 1 flags the validation rows.
        valid_idx = np.load('../data/processed/cv1_valid_v2.npy')
        if cfg.common.debug:
            # Keep only indices that exist in the truncated debug frame.
            valid_idx = valid_idx[np.where(valid_idx < len(train_df))]

        fold_df = pd.DataFrame(index=range(len(train_df)))
        fold_df['fold_0'] = 0
        fold_df.loc[valid_idx, 'fold_0'] += 1

    with t.timer('drop index'):
        if cfg.common.drop:
            drop_idx = factory.get_drop_idx(cfg.common.drop)
            if cfg.common.debug:
                drop_idx = drop_idx[np.where(drop_idx < len(train_df))]
            train_df = train_df.drop(drop_idx, axis=0).reset_index(drop=True)
            fold_df = fold_df.drop(drop_idx, axis=0).reset_index(drop=True)

        # Chunk each user's interaction history into fixed-size steps and
        # build a per-chunk id of the form "<user_id>__<step>".
        train_df['step'] = train_df.groupby(
            'user_id').cumcount() // cfg.data.train.step_size
        train_df['user_step_id'] = train_df['user_id'].astype(
            str) + '__' + train_df['step'].astype(str)

    with t.timer('train model'):
        trainer = NNTrainer(run_name, fold_df, cfg)
        cv = trainer.train(train_df, target_df=train_df[const.TARGET_COLS[0]])
        trainer.save()

        # Rename the log directory so the CV score is visible at a glance.
        run_name_cv = f'{run_name}_{cv:.4f}'
        logger_path.rename(f'../logs/{run_name_cv}')
        logging.disable(logging.FATAL)

    with t.timer('kaggle api'):
        kaggle = Kaggle(cfg, run_name_cv)
        if cfg.common.kaggle.data:
            kaggle.create_dataset()
        if cfg.common.kaggle.notebook:
            kaggle.push_notebook()

    with t.timer('notify'):
        process_minutes = t.get_processing_time()
        notificator = Notificator(run_name=run_name_cv,
                                  model_name=cfg.model.backbone,
                                  cv=round(cv, 4),
                                  process_time=round(process_minutes, 2),
                                  comment=comment,
                                  params=notify_params)
        notificator.send_line()
        notificator.send_notion()
        # notificator.send_slack()

    with t.timer('git'):
        # Record the exact code state that produced this run.
        git = Git(run_name=run_name_cv)
        git.push()
        git.save_hash()
コード例 #5
0
def main():
    """Blend out-of-fold predictions of several runs with optimized weights.

    Loads each configured model's OOF predictions, optimizes per-model
    blend weights against the validation targets, evaluates the ensemble,
    then publishes the run.  Relies on module-level globals (``cfg``,
    ``dh``, ``factory``, ``const``, ``logger_path``, ``run_name``,
    ``comment``, ``notify_params``) configured elsewhere in the file.
    """
    t = Timer()
    seed_everything(cfg.common.seed)

    logger_path.mkdir(exist_ok=True)

    # Snapshot the config next to the logs for reproducibility.
    dh.save(logger_path / 'config.yml', cfg)

    with t.timer('load data'):
        train_df = dh.load('../data/team/X_tra_wo_lec_20M.feather')
        val_df = dh.load('../data/team/X_val_wo_lec.feather')

        train_df['is_val'] = 0
        val_df['is_val'] = 1

        # Concatenate only to recover the positional indices of the
        # validation rows within the full training frame.
        train_df = pd.concat([train_df, val_df],
                             axis=0,
                             sort=False,
                             ignore_index=True)
        val_idx = train_df[train_df['is_val'] == 1].index

        # The concatenated frame is only needed for val_idx; free it.
        del train_df
        gc.collect()

    with t.timer('drop index'):
        drop_idx = np.array([])
        if cfg.common.drop is not None:
            drop_idx = factory.get_drop_idx(cfg.common.drop)
            val_df = val_df.drop(drop_idx, axis=0).reset_index(drop=True)

    with t.timer('load oof and preds'):
        oof_list = []
        # preds_list = []

        for i, log_name in enumerate(sorted(cfg.models)):
            log_dir = Path(f'../logs/{log_name}')
            model_oof = factory.get_result(log_dir, cfg, data_type='train')

            # Keep the OOF rows aligned with the (possibly row-dropped)
            # validation frame.
            if len(drop_idx) > 0:
                model_oof = np.delete(model_oof, drop_idx, axis=0)

            # Optional rank-transform so models on different scales blend.
            if cfg.preprocess.rank:
                model_oof = np.argsort(np.argsort(model_oof)) / len(model_oof)

            oof_list.append(model_oof[val_idx])

    with t.timer('optimize model weight'):
        metric = factory.get_metrics(cfg.common.metrics.name)

        best_weight_array = np.zeros(len(oof_list))
        for target_idx, target in enumerate(const.TARGET_COLS):
            best_weight = opt_ensemble_weight(cfg, val_df[target], oof_list,
                                              metric)
        # Fix: persist the optimized weights — previously the untouched
        # zero array was what got saved to best_weight.npy.  (With several
        # target columns only the last target's weights survive, as before.)
        best_weight_array = np.asarray(best_weight)

    with t.timer('ensemble'):
        # NOTE(review): assumes len(val_df) == len(oof_list[i]) here, i.e.
        # any dropped rows all came from the validation block — confirm.
        ensemble_oof = np.zeros(len(val_df))

        cv_list = []
        # Weighted sum of each model's validation-slice predictions.
        for model_idx, weight in enumerate(best_weight):
            ensemble_oof += oof_list[model_idx] * weight

        cv = metric(val_df[const.TARGET_COLS[0]], ensemble_oof)
        cv_list.append(cv)

        dh.save(f'../logs/{run_name}/oof.npy', ensemble_oof)
        # dh.save(f'../logs/{run_name}/raw_preds.npy', ensemble_preds)
        dh.save(f'../logs/{run_name}/best_weight.npy', best_weight_array)

        cv = np.mean(cv_list)
        # Rename the log directory so the CV score is visible at a glance.
        run_name_cv = f'{run_name}_{cv:.6f}'
        logger_path.rename(f'../logs/{run_name_cv}')

        print('\n\n===================================\n')
        print(f'CV: {cv:.6f}')
        print('\n===================================\n\n')

    with t.timer('kaggle api'):
        kaggle = Kaggle(cfg, run_name_cv)
        if cfg.common.kaggle.data:
            kaggle.create_dataset()
        if cfg.common.kaggle.notebook:
            kaggle.push_notebook()

    with t.timer('notify'):
        process_minutes = t.get_processing_time()
        notificator = Notificator(run_name=run_name_cv,
                                  model_name='ensemble',
                                  cv=round(cv, 4),
                                  process_time=round(process_minutes, 2),
                                  comment=comment,
                                  params=notify_params)
        notificator.send_line()
        notificator.send_notion()
        # notificator.send_slack()

    with t.timer('git'):
        # Record the exact code state that produced this run.
        git = Git(run_name=run_name_cv)
        git.push()
        git.save_hash()
コード例 #6
0
def main():
    """Train a CNN on a stratified 80/20 split and publish the results.

    Relies on module-level globals (``cfg``, ``dh``, ``logger_path``,
    ``run_name``, ``model_name``, ``now``, ``comment``,
    ``notify_params``) configured elsewhere in the file.
    """
    timer = Timer()
    seed_everything(cfg.common.seed)

    # Route all logging for this run into the run-specific directory.
    logger_path.mkdir(exist_ok=True)
    logging.basicConfig(filename=logger_path / 'train.log',
                        level=logging.DEBUG)

    # Snapshot the config next to the logs for reproducibility.
    dh.save(logger_path / 'config.yml', cfg)

    with timer.timer('load data'):
        input_root = Path(cfg.common.input_root)
        train_df = dh.load(input_root / cfg.common.img_file)

    with timer.timer('drop several rows'):
        if cfg.common.drop is not None:
            # Union of every configured drop-index array.
            idx_arrays = [dh.load(f'../pickle/{drop_name}.npy')
                          for drop_name in cfg.common.drop]
            all_drop_idx = np.unique(np.concatenate(idx_arrays))
            train_df = train_df.drop(all_drop_idx,
                                     axis=0).reset_index(drop=True)

    with timer.timer('make folds'):
        features_all = train_df.drop('isup_grade', axis=1)
        labels_all = train_df['isup_grade']
        if cfg.model.n_classes == 1:
            # Single-output (regression) head expects float labels.
            labels_all = labels_all.astype(float)
        trn_x, val_x, trn_y, val_y = train_test_split(
            features_all,
            labels_all,
            test_size=0.2,
            shuffle=True,
            random_state=cfg.common.seed,
            stratify=train_df['isup_grade'])

    with timer.timer('train model'):
        result = train_cnn(run_name, trn_x, val_x, trn_y, val_y, cfg)

    logging.disable(logging.FATAL)
    # Rename the log directory so the CV score is visible at a glance.
    run_name_cv = f'{run_name}_{result["cv"]:.3f}'
    logger_path.rename(f'../logs/{run_name_cv}')

    with timer.timer('kaggle api'):
        kaggle = Kaggle(cfg.compe.compe_name, run_name_cv)
        if cfg.common.kaggle.data:
            kaggle.create_dataset()
        if cfg.common.kaggle.notebook:
            kaggle.push_notebook()

    with timer.timer('notify'):
        process_minutes = timer.get_processing_time()
        message = f'''{model_name}\ncv: {result["cv"]:.3f}\ntime: {process_minutes:.2f}[h]'''
        send_line(notify_params.line.token, message)

        notion = Notion(token=notify_params.notion.token_v2)
        notion.set_url(url=notify_params.notion.url)
        notion.insert_rows({
            'name': run_name_cv,
            'created': now,
            'model': cfg.model.name,
            'local_cv': round(result['cv'], 4),
            'time': process_minutes,
            'comment': comment
        })