# Bagging loop: train one model per split random seed and record its checkpoint
# path and accuracies for later ensembling.
# Assumes the surrounding script already provides `args`, `config` (the
# train_config module), `main` (the training entry point), `nn` (torch.nn),
# `partial` (functools) and `tqdm`.
log = []
for seed in tqdm(range(args.start, args.num_models), desc='bagging', leave=False):
    config.train['split_random_seed'] = seed
    if args.model_name == 'tede':
        config.net['name'] = 'tede_resnet18'
        config.train['optimizer'] = 'SGD'
        config.train['batch_size'] = 64
        # config.train['lr_bounds'] = [0, 40, 60, 72, 80]
        # config.train['lrs'] = [1e-1, 1e-2, 1e-3, 1e-4]
        config.train['lr_bounds'] = [0, 40, 48, 52]
        config.train['lrs'] = [1e-1, 1e-2, 1e-3]
        config.train['add_attributes'] = True
        config.train['add_class_wordsembeddings'] = True
        config.parse_config()
        activation_fn = partial(nn.LeakyReLU)
        config.net['semantic_mlp_kwargs']['last_activation_fn'] = activation_fn
        config.net['visual_mlp_kwargs']['activation_fn'] = activation_fn
        feature_layer_dim = 384
        config.net['visual_mlp_kwargs']['out_channels'] = feature_layer_dim
        config.net['semantic_mlp_kwargs']['out_channels'] = feature_layer_dim
        result = main(config)
        # One record per model: seed, checkpoint path (repeated to keep the
        # five-column format the test script expects), then non-zero-shot and
        # zero-shot accuracies (the training result reports error rates).
        log.append([str(config.train['split_random_seed']),
                    result['log_path'],
                    result['log_path'],
                    str(1 - result['non_zero']),
                    str(1 - result['zero'])])
    elif args.model_name == 'gcn':
        config.train['save_metric'] = 'err'
        config.train['optimizer'] = 'SGD'
        config.train['batch_size'] = 64
        config.train['lr_bounds'] = [0, 40, 60, 72, 80]
        config.train['lrs'] = [3e-1, 3e-2, 3e-3, 1e-4]
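# Hedged sketch (not part of the original script): the bagging loop above only
# accumulates `log` in memory; presumably the surrounding script dumps it to a
# whitespace-separated model-info file, which is the format parsed by the
# ensembling `main` below (one model per line: seed, two paths, two accuracies).
# The helper name and the default file name here are illustrative assumptions.
def write_model_info(log, path='model_info.txt'):
    """Write one space-separated line per trained model."""
    with open(path, 'w') as fp:
        for record in log:
            fp.write(' '.join(record) + '\n')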
# Ensemble inference: run every model listed in the model-info file on the test
# set, sum the per-sample class scores, and keep the highest-scoring label.
import os

from tqdm import tqdm


def main(args):
    fc = []
    with open(args.model_info) as fp:
        first = True
        a = fp.readlines()
        # One model per line, whitespace-separated (as written by the bagging script).
        a = [x.strip().split(' ') for x in a]
        # print(a)
        if args.top is not None:
            # Sort by the last column and keep the first `top` entries.
            a.sort(key=lambda x: float(x[-1]), reverse=False)
            a = a[:args.top]
            print([float(x[-1]) for x in a])
        for l in tqdm(a, leave=False):
            if args.model_name == 'gcn':
                import test_gcn
                random_seed, resume_non_zero_net, resume, nonzero_acc, zero_acc = (
                    l[0], l[1], l[2], float(l[3]), float(l[4]))
                arg = type('', (), {})()  # empty namespace object for test-time options
                arg.resume = resume
                arg.input_list = '../data/test.txt'
                arg.resume_non_zero_net = resume_non_zero_net
                arg.resume_coarse_net = None
                arg.zero_acc = zero_acc
                arg.nonzero_acc = nonzero_acc
                arg.x_tag = 'feat'
                arg.output_list = '../data/output.list'
                arg.batch_size = 256
                arg.has_label = False
                if not args.use_non_zero_net:
                    arg.resume_non_zero_net = None
                import train_config as config
                config.train['split_random_seed'] = random_seed
                config.train['graph_similarity'] = 'custom'
                config.parse_config()
                temp_fc = test_gcn.main(arg, config)['fc']
            elif args.model_name == 'tede':
                import test_tede
                arg = type('', (), {})()  # empty namespace object for test-time options
                random_seed, resume, _, nonzero_acc, zero_acc = (
                    l[0], l[1], l[2], float(l[3]), float(l[4]))
                arg.resume = resume
                arg.input_list = '../data/test.txt'
                arg.batch_size = 256
                arg.has_label = False
                arg.output_list = '../data/output.list'
                arg.resume_epoch = args.resume_epoch
                import train_config as config
                config.train['split_random_seed'] = random_seed
                config.net['name'] = 'tede_resnet18'
                config.parse_config()
                feature_layer_dim = 384
                config.net['visual_mlp_kwargs']['out_channels'] = feature_layer_dim
                config.net['semantic_mlp_kwargs']['out_channels'] = feature_layer_dim
                temp_fc = test_tede.main(arg, config)['fc']
            # The per-model output list is only a side effect; remove it.
            os.system('rm {}'.format(arg.output_list))
            if first:
                fc = temp_fc
                first = False
            else:
                # Add this model's class scores into the running ensemble totals.
                for i in range(len(fc)):
                    for k in fc[i]:
                        fc[i][k] += temp_fc[i][k]

    # Pick the label with the highest summed score for each test sample.
    y = []
    for fc_dict in fc:
        y.append(max(fc_dict, key=fc_dict.get))

    test_list = open('../data/test.txt').read().strip().split('\n')
    assert len(y) == len(test_list)
    output_list = []
    for label, line in zip(y, test_list):
        output_list.append(line.split('\t')[0].strip().split('/')[-1] + '\t' + label)
    with open(args.output_list, 'w') as fp:
        fp.write('\n'.join(output_list) + '\n')
        fp.flush()
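# Hedged usage sketch (not the repository's actual CLI): `main` above only reads
# attribute-style options, so a minimal argparse driver like the one below is
# enough to run the ensembling step. Flag names and defaults are assumptions,
# not taken from the original script.
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='Ensemble test-time predictions')
    parser.add_argument('--model_info', required=True,
                        help='file written by the bagging script, one model per line')
    parser.add_argument('--model_name', choices=['gcn', 'tede'], default='tede')
    parser.add_argument('--top', type=int, default=None,
                        help='keep only this many models after sorting by the last column')
    parser.add_argument('--use_non_zero_net', action='store_true')
    parser.add_argument('--resume_epoch', type=int, default=None)
    parser.add_argument('--output_list', default='../data/predict.list')
    main(parser.parse_args())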