def parse_arguments():
    """Parse command-line options for the HICO dataset scripts.

    Returns:
        argparse.Namespace with ``data_root`` and ``tmp_root`` attributes,
        defaulting to the locations declared by ``hico_config.Paths``.
    """
    default_paths = hico_config.Paths()
    arg_parser = argparse.ArgumentParser(description='HICO dataset')
    arg_parser.add_argument(
        '--data-root', default=default_paths.data_root, help='dataset path')
    arg_parser.add_argument(
        '--tmp-root', default=default_paths.tmp_root,
        help='intermediate result path')
    return arg_parser.parse_args()
def main():
    """Collect the HICO data and report elapsed wall-clock time."""
    paths = hico_config.Paths()
    started = time.time()
    collect_data(paths)
    elapsed = time.time() - started
    print('Time elapsed: {:.2f}s'.format(elapsed))
def main():
    """Run feature extraction over the configured HICO dataset paths."""
    extract_features(hico_config.Paths())
def parse_arguments():
    """Parse command-line options for fine-tuning an ImageNet backbone on HICO.

    Builds an argparse CLI covering data location, optimizer hyperparameters,
    checkpoint/resume paths, and (unused single-node) distributed settings.

    Returns:
        argparse.Namespace with the parsed training configuration.

    Bug fixed: the original ended with a bare ``return`` followed by a
    stranded ``parser.parse_args()`` expression, so the function returned
    ``None`` and never parsed the arguments.
    """
    paths = hico_config.Paths()
    feature_type = 'resnet'
    parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
    parser.add_argument('--feature-type', default=feature_type,
                        help='feature_type')
    parser.add_argument('--data', metavar='DIR', default=paths.data_root,
                        help='path to dataset')
    parser.add_argument('-j', '--workers', default=1, type=int, metavar='N',
                        help='number of data loading workers (default: 4)')
    parser.add_argument('--epochs', default=100, type=int, metavar='N',
                        help='number of total epochs to run')
    parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
                        help='manual epoch number (useful on restarts)')
    parser.add_argument('-b', '--batch-size', default=32, type=int,
                        metavar='N', help='mini-batch size (default: 256)')
    parser.add_argument('--lr', '--learning-rate', default=1e-4, type=float,
                        metavar='LR', help='initial learning rate')
    parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
                        help='momentum')
    parser.add_argument('--weight-decay', '--wd', default=1e-3, type=float,
                        metavar='W', help='weight decay (default: 1e-4)')
    parser.add_argument('--print-freq', '-p', default=30, type=int,
                        metavar='N', help='print frequency (default: 10)')
    parser.add_argument(
        '--resume',
        default=os.path.join(
            paths.tmp_root,
            'checkpoints/hico/finetune_{}'.format(feature_type)),
        type=str, metavar='PATH',
        help='path to latest checkpoint (default: none)')
    parser.add_argument('-e', '--evaluate', dest='evaluate',
                        action='store_true',
                        help='evaluate model on validation set')
    # NOTE(review): default=True with action='store_true' means the flag can
    # never be turned off from the CLI; kept as-is to preserve behavior.
    parser.add_argument('--pretrained', dest='pretrained', default=True,
                        action='store_true', help='use pre-trained model')
    parser.add_argument('--world-size', default=1, type=int,
                        help='number of distributed processes')
    parser.add_argument('--dist-url', default='tcp://224.66.41.62:23456',
                        type=str,
                        help='url used to set up distributed training')
    parser.add_argument('--dist-backend', default='gloo', type=str,
                        help='distributed backend')
    return parser.parse_args()
def main():
    """Extract features for both the train and test image sets."""
    paths = hico_config.Paths()
    for split in ('train', 'test'):
        extract_features(paths, split)
def main():
    """Locate rare human-object-interaction categories in the HICO data."""
    find_rare_hoi(hico_config.Paths())