def main():
    """Script entry point: parse CLI arguments, build the config, and launch the run.

    Side effects: seeds RNGs, configures logging, renames the process, and
    probes GPU availability before handing off to ``run``.
    """
    # Get args and configs.
    args = parse_argument()
    configs = Config()
    set_names(args)
    common.set_random_seed(configs)
    # Merge CLI overrides into the config, then set up logging / process name.
    common.set_additional_args(args, configs)
    common.setup_log(args)
    common.set_process_name(args)
    # NOTE: the original bound this to an unused `use_cuda` local; the call is
    # kept for its side effect (GPU availability check), the dead binding dropped.
    common.check_gpu(args)
    print_settings(args, configs)
    run(args, configs)
def main():
    """Entry point: derive batch/LR settings, set up logging, then train.

    Mutates FLAGS in place (batch sizes, lr, steps per epoch) and sets the
    module-level ``is_root_rank`` before running the train/val/test loop.
    """
    global is_root_rank

    # Derive effective batch sizes depending on the launch mode.
    if FLAGS.use_distributed:
        udist.init_dist()
        FLAGS.batch_size = udist.get_world_size() * FLAGS.per_gpu_batch_size
        FLAGS._loader_batch_size = FLAGS.per_gpu_batch_size
        if FLAGS.bn_calibration:
            FLAGS._loader_batch_size_calib = FLAGS.bn_calibration_per_gpu_batch_size
        # Split loader workers across the processes on this node.
        FLAGS.data_loader_workers = round(
            FLAGS.data_loader_workers / udist.get_local_size())
        is_root_rank = udist.is_master()
    else:
        gpu_count = torch.cuda.device_count()
        FLAGS.batch_size = gpu_count * FLAGS.per_gpu_batch_size
        FLAGS._loader_batch_size = FLAGS.batch_size
        if FLAGS.bn_calibration:
            FLAGS._loader_batch_size_calib = (
                FLAGS.bn_calibration_per_gpu_batch_size * gpu_count)
        is_root_rank = True

    # Linear LR scaling with the global batch size.
    FLAGS.lr = FLAGS.base_lr * (FLAGS.batch_size / FLAGS.base_total_batch)
    # NOTE: don't drop last batch, thus must use ceil, otherwise learning rate
    # will be negative
    FLAGS._steps_per_epoch = int(np.ceil(NUM_IMAGENET_TRAIN / FLAGS.batch_size))

    if is_root_rank:
        # Timestamped experiment directory; only the root rank writes logs.
        FLAGS.log_dir = '{}/{}'.format(FLAGS.log_dir,
                                       time.strftime("%Y%m%d-%H%M%S"))
        create_exp_dir(
            FLAGS.log_dir,
            FLAGS.config_path,
            blacklist_dirs=[
                'exp',
                '.git',
                'pretrained',
                'tmp',
                'deprecated',
                'bak',
            ],
        )
        setup_logging(FLAGS.log_dir)
        for name, value in _ENV_EXPAND.items():
            logging.info('Env var expand: {} to {}'.format(name, value))
        logging.info(FLAGS)

    set_random_seed(FLAGS.get('random_seed', 0))
    with SummaryWriterManager():
        train_val_test()
def main():
    """Entry point for evaluation only: force test mode, set up logging, run val."""
    # Evaluation-only run: never enter the training branch.
    FLAGS.test_only = True
    mc.setup_distributed()

    if udist.is_master():
        # Master rank owns the timestamped log directory and log output.
        FLAGS.log_dir = '{}/{}'.format(FLAGS.log_dir,
                                       time.strftime("%Y%m%d-%H%M%S-eval"))
        setup_logging(FLAGS.log_dir)
        for name, value in _ENV_EXPAND.items():
            logging.info('Env var expand: {} to {}'.format(name, value))
        logging.info(FLAGS)

    set_random_seed(FLAGS.get('random_seed', 0))
    with mc.SummaryWriterManager():
        val()
def main():
    """Entry.

    Selects the per-dataset training-set size, optionally decodes a compact
    `net_params` string into a segmentation model spec, sets up logging on the
    master rank, and runs train/val/test.
    """
    # Training-set sizes used to compute steps per epoch; ImageNet is the default.
    NUM_IMAGENET_TRAIN = 1281167
    if FLAGS.dataset == 'cityscapes':
        NUM_IMAGENET_TRAIN = 2975
    elif FLAGS.dataset == 'ade20k':
        NUM_IMAGENET_TRAIN = 20210
    elif FLAGS.dataset == 'coco':
        NUM_IMAGENET_TRAIN = 149813
    mc.setup_distributed(NUM_IMAGENET_TRAIN)
    if FLAGS.net_params and FLAGS.model_kwparams.task == 'segmentation':
        # Encoded architecture string: tag-inputchannels-block1-block2-block3-block4-lastchannel,
        # with '_'-separated integers inside each dash-separated field.
        tag, input_channels, block1, block2, block3, block4, last_channel = FLAGS.net_params.split(
            '-')
        input_channels = [int(item) for item in input_channels.split('_')]
        block1 = [int(item) for item in block1.split('_')]
        block2 = [int(item) for item in block2.split('_')]
        block3 = [int(item) for item in block3.split('_')]
        block4 = [int(item) for item in block4.split('_')]
        last_channel = int(last_channel)

        # Expand each block spec into repeated inverted-residual entries:
        # item[0] = repeat count, item[1] = first setting, then the remaining
        # values are split into two halves of len(item)/2 - 1 each.
        # NOTE(review): the slice arithmetic assumes each block list has even
        # length >= 4 — confirm against the net_params producer.
        inverted_residual_setting = []
        for item in [block1, block2, block3, block4]:
            for _ in range(item[0]):
                inverted_residual_setting.append([
                    item[1], item[2:-int(len(item) / 2 - 1)],
                    item[-int(len(item) / 2 - 1):]
                ])

        FLAGS.model_kwparams.input_channel = input_channels
        FLAGS.model_kwparams.inverted_residual_setting = inverted_residual_setting
        FLAGS.model_kwparams.last_channel = last_channel
    if udist.is_master():
        # Master rank creates the timestamped experiment dir and owns logging.
        FLAGS.log_dir = '{}/{}'.format(FLAGS.log_dir,
                                       time.strftime("%Y%m%d-%H%M%S"))
        # yapf: disable
        create_exp_dir(FLAGS.log_dir, FLAGS.config_path, blacklist_dirs=[
            'exp', '.git', 'pretrained', 'tmp', 'deprecated', 'bak', 'output'])
        # yapf: enable
        setup_logging(FLAGS.log_dir)
        for k, v in _ENV_EXPAND.items():
            logging.info('Env var expand: {} to {}'.format(k, v))
        logging.info(FLAGS)
    set_random_seed(FLAGS.get('random_seed', 0))
    with mc.SummaryWriterManager():
        train_val_test()
def main():
    """Entry point: set up distributed state, master-rank logging, then train."""
    # ImageNet training-set size; used by setup_distributed for step accounting.
    NUM_IMAGENET_TRAIN = 1281167
    mc.setup_distributed(NUM_IMAGENET_TRAIN)

    if udist.is_master():
        # Only the master rank creates the run directory and emits logs.
        FLAGS.log_dir = '{}/{}'.format(FLAGS.log_dir,
                                       time.strftime("%Y%m%d-%H%M%S"))
        # yapf: disable
        create_exp_dir(FLAGS.log_dir, FLAGS.config_path, blacklist_dirs=[
            'exp', '.git', 'pretrained', 'tmp', 'deprecated', 'bak'])
        # yapf: enable
        setup_logging(FLAGS.log_dir)
        for name, value in _ENV_EXPAND.items():
            logging.info('Env var expand: {} to {}'.format(name, value))
        logging.info(FLAGS)

    set_random_seed(FLAGS.get('random_seed', 0))
    with mc.SummaryWriterManager():
        train_val_test()
from statistics import mean, stdev

if __name__ == "__main__":
    # Config files to sweep; uncomment the runs you want.
    cfg_json_list = [
        # "config/prelim/huff_nc5_sr_c.json",
        # "config/prelim/huff_nc5_sr_r.json",
        # "config/prelim/fewrel_nc5_sr_c.json",
        # "config/prelim/fewrel_nc5_sr_r.json",
        # "config/prelim/huff_nc5_rd_c.json",
        # "config/prelim/huff_nc5_rd_r.json",
        # "config/prelim/huff_nc5_eda_c.json",
        # "config/prelim/huff_nc5_eda_r.json",
    ]

    for cfg_json in cfg_json_list:
        cfg = configuration.triplet_ap_cl_gradual_config.from_json(cfg_json)
        print(f"\n\n\nconfig from {cfg_json}")

        # Train/eval once per seed and collect accuracies.
        acc_list = []
        for seed_num in range(cfg.num_seeds):
            common.set_random_seed(seed_num)
            acc_list.append(
                triplet_cl_methods.train_eval_cl_gradual_model(cfg, seed_num))

        acc_mean = mean(acc_list)
        # stdev needs at least two samples; report -1 as a sentinel otherwise.
        if len(acc_list) > 1:
            acc_stdev = stdev(acc_list)
        else:
            acc_stdev = -1
        print(f"acc: {acc_mean:.3f} pm {acc_stdev:.3f} ; {acc_list}")
import json import os from data import preprocess from data import loader from utils import common import model flags.DEFINE_string('experiments_path', os.path.join(common.project_root, 'experiments'), help='') flags.DEFINE_string('config_name', 'config/AConvNet-SOC.json', help='') FLAGS = flags.FLAGS common.set_random_seed(12321) def load_dataset(path, is_train, name, batch_size): transform = [preprocess.CenterCrop(88), torchvision.transforms.ToTensor()] if is_train: transform = [ preprocess.RandomCrop(88), torchvision.transforms.ToTensor() ] _dataset = loader.Dataset( path, name=name, is_train=is_train, transform=torchvision.transforms.Compose(transform)) data_loader = torch.utils.data.DataLoader(_dataset,