def run_bench(config):
    log_dir = config2str(config)
    log_dir = os.path.join('tests', config.env_name, log_dir)

    # Create env
    envs = [
        GymEnv(env_name=config.env_name,
               running_normalize_states=config.running_normalize_states,
               running_scale_rewards=config.running_scale_rewards)
        for _ in range(config.num_envs)
    ]
    env = ParallelEnv(envs)

    # Define network configs
    policy_nn_config = Config(
        body=[
            dict(func=nn.Linear, out_features=64),
            dict(func=config.activation),
            dict(func=nn.Linear, in_features=64, out_features=64),
            dict(func=config.activation)
        ],
        head=[dict(func=ActionLinear)])
    value_nn_config = Config(
        body=[
            dict(func=nn.Linear, out_features=64),
            dict(func=config.activation),
            dict(func=nn.Linear, in_features=64, out_features=64),
            dict(func=config.activation)
        ],
        head=[dict(func=nn.Linear, out_features=1)])

    # Create models
    policy_model_config = Config(nn_config=policy_nn_config)
    policy_model = PPOClipModel.from_config(
        config=policy_model_config,
        env=env,
        opt_params=config.policy_opt_params,
        clip_grad_norm=config.clip_grad_norm)

    value_model_config = Config(nn_config=value_nn_config)
    value_model = ValueModel.from_config(
        config=value_model_config,
        env=env,
        clip_range=config.value_clip,
        opt_params=config.value_opt_params,
        clip_grad_norm=config.clip_grad_norm)

    # Create agent
    agent = PGAgent(env=env,
                    policy_model=policy_model,
                    value_model=value_model,
                    normalize_advantages=config.normalize_advantages,
                    log_dir=log_dir)
    agent.train(max_steps=config.max_steps,
                steps_per_batch=config.steps_per_batch)
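
# Minimal usage sketch for run_bench (an assumption, not part of the original
# benchmark script): Config is assumed to accept arbitrary keyword fields, as
# the attribute access above suggests, and every hyperparameter value below is
# illustrative.
if __name__ == '__main__':
    bench_config = Config(env_name='HalfCheetah-v2',
                          num_envs=16,
                          activation=nn.Tanh,
                          running_normalize_states=True,
                          running_scale_rewards=True,
                          policy_opt_params=dict(lr=3e-4),
                          value_opt_params=dict(lr=3e-4),
                          clip_grad_norm=0.5,
                          value_clip=0.2,
                          normalize_advantages=True,
                          max_steps=1_000_000,
                          steps_per_batch=2048)
    run_bench(bench_config)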
NUM_WORKERS = len(device_ids) * 4
BATCH_SIZE = len(device_ids) * 24

# data_path = '/home/jianglb/pythonproject/cloud_segment/data/train_{}_{}'.format(*RESIZE)
data_path = '/home/noel/pythonproject/cloud_segment/data/train_{}_{}'.format(
    *RESIZE)
train_csv = os.path.join(DATA_PATH, 'train.csv')
kfold_path = 'kfold.pkl'

dir_name = '{}_{}_all'.format(CLASSIFER, time_str)
save_dir = os.path.join(SAVE_PATH, 'classify', dir_name)
if not os.path.exists(save_dir):
    os.mkdir(save_dir)
with open(os.path.join(save_dir, 'config'), 'w') as f:
    f.write('{}\n\n{}'.format(time_str, utils.config2str(config)))

mean_loss = 0
for fold in range(K):
    model = load_model(CLASSIFER, classes=4, dropout=dropout, pretrained=True)
    model.cuda()
    model.train()
    optimizer = optim.Adam(model.parameters(), lr=LR, weight_decay=WD)
    criterion = torch.nn.BCEWithLogitsLoss()
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
                                                           factor=0.1,
                                                           patience=patience)
    preprocessing_fn = smp.encoders.get_preprocessing_fn(
        'resnet34', 'imagenet')
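
    # Sketch of the per-fold training loop these objects feed into (assumed
    # structure: EPOCHS, train_loader, val_loader, and the evaluate() helper
    # are not in the snippet and are illustrative). ReduceLROnPlateau monitors
    # a metric, so the validation loss is passed to scheduler.step() each epoch.
    for epoch in range(EPOCHS):
        for images, targets in train_loader:
            optimizer.zero_grad()
            loss = criterion(model(images.cuda()), targets.cuda())
            loss.backward()
            optimizer.step()
        val_loss = evaluate(model, val_loader, criterion)  # hypothetical helper
        scheduler.step(val_loss)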
          'rb') as f:
    config = pickle.load(f)
encoder_weights = config['encoder_weights']
seg_name = '{}{}'.format(config['MODEL'], config['ENCODER'])
cls_name = cls_fold.split('_')[0]
time_str = time.strftime("%m%d-%H%M", time.localtime())
save_dir = os.path.join(base_folder,
                        '{}_{}_{}'.format(seg_name, cls_name, time_str))
if not os.path.exists(save_dir):
    os.mkdir(save_dir)
with open(os.path.join(save_dir, 'config'), 'w') as f:
    f.write('{}\n\n{}'.format(
        time_str,
        utils.config2str({
            'seg_fold': seg_fold,
            'cls_fold': cls_fold
        })))

# search thresholds
seg_probs = np.load(
    os.path.join(SAVE_PATH, 'segment', seg_fold, 'seg_probs.npy'))
cls_probs = np.load(
    os.path.join(SAVE_PATH, 'classify', cls_fold, 'cls_probs.npy'))
tg_masks = np.load(os.path.join(SAVE_PATH, 'tg', 'tg_masks.npy'))
tg_classes = np.load(os.path.join(SAVE_PATH, 'tg', 'tg_classes.npy'))
cls_threshold, seg_threshold, cpn_threshold, best_dice = utils.search_threshold(
    tg_masks, seg_probs, tg_classes, cls_probs)
with open(os.path.join(save_dir, 'config'), 'a') as f:
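    # Assumed body of the append above (the snippet is truncated here): log the
    # searched thresholds alongside the run config.
    f.write('\n{}'.format(
        utils.config2str({
            'cls_threshold': cls_threshold,
            'seg_threshold': seg_threshold,
            'cpn_threshold': cpn_threshold,
            'best_dice': best_dice
        })))


# Conceptual sketch of utils.search_threshold, inferred from its inputs and
# outputs (not the actual implementation; the grid values and the dice()
# helper are assumptions, and tg_classes is omitted in this simplification):
# grid-search a classification threshold that zeroes whole masks, a pixel
# threshold on the segmentation probabilities, and a minimum predicted-area
# threshold, scored by Dice against the ground-truth masks.
def dice(pred, target, eps=1e-6):
    inter = (pred * target).sum()
    return (2.0 * inter + eps) / (pred.sum() + target.sum() + eps)


def search_threshold_sketch(tg_masks, seg_probs, tg_classes, cls_probs):
    best = (0.5, 0.5, 0, 0.0)
    for cls_t in np.arange(0.3, 0.8, 0.1):
        for seg_t in np.arange(0.3, 0.8, 0.1):
            for cpn_t in (0, 256, 1024):
                preds = (seg_probs > seg_t).astype(np.uint8)
                preds[cls_probs < cls_t] = 0                 # gate empty masks
                preds[preds.sum(axis=(-2, -1)) < cpn_t] = 0  # drop small masks
                score = dice(preds, tg_masks)
                if score > best[3]:
                    best = (cls_t, seg_t, cpn_t, score)
    return best  # cls_threshold, seg_threshold, cpn_threshold, best_dice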
    'epoch_num': 1,
    'gpu': 0,
    'lr': 1e-4,
    # 'lr_decay': 0.1,
    'weight_decay': 0.2
}

print('Start Training & Inference...')
timestr = time.strftime('%m%d-%H%M')
if not os.path.exists('saved_models/NN_{}'.format(timestr)):
    os.mkdir('saved_models/NN_{}'.format(timestr))
log = timestr
log += '\n'
log += utils.config2str(config)
log += '\n'

train_preds = []
test_preds = []
cv_score = {'train': 0, 'validate': 0}
for fold in range(cv):
    print('Fold{}:'.format(fold))
    train_idx = kfold[fold][0]
    validate_idx = kfold[fold][1]
    x_test = data_test[fold]
    x_validate = data_train[fold][validate_idx]
    y_validate = label_train[validate_idx]
    w_validate = weight[validate_idx]
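
    # Assumed continuation of the fold loop (the snippet truncates here): build
    # the complementary training split and fit one model per fold.
    # train_model() is a hypothetical helper standing in for the actual
    # training routine.
    x_train = data_train[fold][train_idx]
    y_train = label_train[train_idx]
    w_train = weight[train_idx]
    model, scores = train_model(config, x_train, y_train, w_train,
                                x_validate, y_validate, w_validate)
    cv_score['train'] += scores['train'] / cv
    cv_score['validate'] += scores['validate'] / cv
    test_preds.append(model.predict(x_test))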
ensemble_fold = 'Ensemble14_mean_1118-1530'
cls_fold = 'Ensemble5_max_1116-2332'
time_str = time.strftime("%m%d-%H%M", time.localtime())
save_dir = os.path.join(base_folder,
                        '{}_{}_{}'.format(ensemble_fold, cls_fold, time_str))
print(save_dir)
if not os.path.exists(save_dir):
    os.mkdir(save_dir)
with open(os.path.join(save_dir, 'config'), 'w') as f:
    f.write('{}\n\n{}'.format(
        time_str,
        utils.config2str({
            'ensemble_fold': ensemble_fold,
            'cls_fold': cls_fold
        })))

# search thresholds
seg_probs = np.load(
    os.path.join(SAVE_PATH, 'segment', ensemble_fold, 'seg_probs.npy'))
cls_probs = np.load(
    os.path.join(SAVE_PATH, 'classify', cls_fold, 'cls_probs.npy'))
tg_masks = np.load(os.path.join(SAVE_PATH, 'tg', 'tg_masks.npy'))
tg_classes = np.load(os.path.join(SAVE_PATH, 'tg', 'tg_classes.npy'))
cls_threshold, seg_threshold, cpn_threshold, best_dice = utils.search_threshold(
    tg_masks, seg_probs, tg_classes, cls_probs)
with open(os.path.join(save_dir, 'config'), 'a') as f:
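    # Assumed body of the append above: record the searched thresholds and the
    # resulting Dice (key names are illustrative).
    f.write('\ncls_threshold: {}  seg_threshold: {}  cpn_threshold: {}  '
            'best_dice: {}\n'.format(cls_threshold, seg_threshold,
                                     cpn_threshold, best_dice))

# Sketch of applying the searched thresholds at inference time (assumed usage,
# mirroring how search_threshold scores candidates): classifier probabilities
# gate whole masks, pixels are binarized at seg_threshold, and predictions
# smaller than cpn_threshold pixels are dropped.
final_masks = (seg_probs > seg_threshold).astype(np.uint8)
final_masks[cls_probs < cls_threshold] = 0
final_masks[final_masks.sum(axis=(-2, -1)) < cpn_threshold] = 0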
    'alpha': 0.2,
    'lambda': 0.2,
    # 'nthread': 4,
    # 'scale_pos_weight': 1,
    'seed': 2019,
}

print('Start Training & Inference...')
timestr = time.strftime('%m%d-%H%M')
if not os.path.exists('saved_models/XGB_{}'.format(timestr)):
    os.mkdir('saved_models/XGB_{}'.format(timestr))
log = timestr
log += '\n'
log += utils.config2str(params)
log += '\n'

train_preds = []
test_preds = []
cv_score = {'train': 0, 'validate': 0}
for fold in range(cv):
    print('Fold{}:'.format(fold))
    train_idx = kfold[fold][0]
    validate_idx = kfold[fold][1]
    x_test = data_test[fold]
    x_validate = data_train[fold][validate_idx]
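
    # Assumed continuation of the fold loop (the snippet truncates here; an
    # `import xgboost as xgb` is assumed): build DMatrix objects and train with
    # early stopping on the validation split. label_train and the round /
    # stopping values are illustrative assumptions.
    x_train = data_train[fold][train_idx]
    dtrain = xgb.DMatrix(x_train, label=label_train[train_idx])
    dvalid = xgb.DMatrix(x_validate, label=label_train[validate_idx])
    booster = xgb.train(params, dtrain,
                        num_boost_round=1000,
                        evals=[(dtrain, 'train'), (dvalid, 'validate')],
                        early_stopping_rounds=50)
    test_preds.append(booster.predict(xgb.DMatrix(x_test)))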