Example #1
def main():
    # Extract CNN features for every image in the chosen dataset split
    # and write them to disk.
    global args
    args = parser.parse_args()

    print("=> using pre-trained model '{}'".format(args.arch))
    model = convnets.factory({'arch': args.arch},
                             cuda=True,
                             data_parallel=True)

    extract_name = 'arch,{}_size,{}'.format(args.arch, args.size)

    # Channel-wise ImageNet statistics expected by the pre-trained backbone.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])

    if args.dataset == 'coco':
        if 'coco' not in args.dir_data:
            raise ValueError('"coco" string not in dir_data')
        dataset = datasets.COCOImages(args.data_split,
                                      dict(dir=args.dir_data),
                                      transform=transforms.Compose([
                                          transforms.Resize(args.size),  # formerly transforms.Scale
                                          transforms.CenterCrop(args.size),
                                          transforms.ToTensor(),
                                          normalize,
                                      ]))
    elif args.dataset == 'vgenome':
        if args.data_split != 'train':
            raise ValueError('train split is required for vgenome')
        if 'vgenome' not in args.dir_data:
            raise ValueError('"vgenome" string not in dir_data')
        dataset = datasets.VisualGenomeImages(args.data_split,
                                              dict(dir=args.dir_data),
                                              transform=transforms.Compose([
                                                  transforms.Resize(args.size),  # formerly transforms.Scale
                                                  transforms.CenterCrop(args.size),
                                                  transforms.ToTensor(),
                                                  normalize,
                                              ]))
    else:
        raise ValueError('unknown dataset: {}'.format(args.dataset))

    # shuffle=False keeps the saved features aligned with the dataset's
    # image order.
    data_loader = DataLoader(dataset,
                             batch_size=args.batch_size,
                             shuffle=False,
                             num_workers=args.workers,
                             pin_memory=True)

    dir_extract = os.path.join(args.dir_data, 'extract', extract_name)
    path_file = os.path.join(dir_extract, args.data_split + 'set')
    os.makedirs(dir_extract, exist_ok=True)  # portable; safe for paths with spaces

    extract(data_loader, model, path_file, args.mode)
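Note: the snippet assumes a module-level argparse parser that is not shown. Below is a minimal sketch of the arguments main() reads; the option names are taken directly from the code above, but the defaults are illustrative assumptions, not the project's.

import argparse

# Hypothetical parser covering every attribute main() reads from args.
parser = argparse.ArgumentParser(description='Extract CNN image features')
parser.add_argument('--arch', default='resnet152')
parser.add_argument('--size', default=448, type=int)
parser.add_argument('--dataset', default='coco', choices=['coco', 'vgenome'])
parser.add_argument('--dir_data', default='data/coco')
parser.add_argument('--data_split', default='train')
parser.add_argument('--batch_size', default=80, type=int)
parser.add_argument('--workers', default=4, type=int)
parser.add_argument('--mode', default='both')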
Example #2
def main():
    # Module-level globals are shared with the WSGI request handler.
    global args, options, model, cnn, transform, trainset
    args = parser.parse_args()

    options = {'logs': {'dir_logs': args.dir_logs}}
    if args.path_opt is not None:
        with open(args.path_opt, 'r') as handle:
            options_yaml = yaml.safe_load(handle)  # yaml.load without a Loader is deprecated/unsafe
        options = utils.update_values(options, options_yaml)
    print('## args')
    pprint(vars(args))
    print('## options')
    pprint(options)

    trainset = datasets.factory_VQA(options['vqa']['trainsplit'],
                                    options['vqa'])  # options['coco'])

    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    transform = transforms.Compose([
        transforms.Resize(options['coco']['size']),  # formerly transforms.Scale
        transforms.CenterCrop(options['coco']['size']),
        transforms.ToTensor(),
        normalize,
    ])

    opt_factory_cnn = {'arch': options['coco']['arch']}
    cnn = convnets.factory(opt_factory_cnn,
                           cuda=args.cuda,
                           data_parallel=False)
    model = models.factory(options['model'],
                           trainset.vocab_words(),
                           trainset.vocab_answers(),
                           cuda=args.cuda,
                           data_parallel=False)
    model.eval()  # inference only: disables dropout and batch-norm updates
    start_epoch, best_acc1, _ = load_checkpoint(
        model, None, os.path.join(options['logs']['dir_logs'], args.resume))

    # Serve the demo with werkzeug's development server; `application` is
    # the WSGI callable that handles requests (see the sketch below).
    my_local_ip = '192.168.0.32'
    my_local_port = 3456
    run_simple(my_local_ip, my_local_port, application)
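run_simple comes from werkzeug.serving, and the `application` it serves is a WSGI callable that is not part of this snippet. A minimal placeholder sketch, assuming the real handler would push the request's image and question through `transform`, `cnn`, and `model`:

from werkzeug.serving import run_simple
from werkzeug.wrappers import Request, Response

@Request.application
def application(request):
    # Placeholder handler: the actual demo would decode the uploaded image
    # and question, apply `transform` and `cnn`, and answer with `model`.
    return Response('VQA demo server is running', mimetype='text/plain')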
def initialize(args, dataset="breast"):
    options = {
        'logs': {
            'dir_logs': args.dir_logs
        }
    }
    if args.path_opt is not None:
        with open(args.path_opt, 'r') as handle:
            options_yaml = yaml.safe_load(handle)  # yaml.load without a Loader is deprecated/unsafe
        options = utils.update_values(options, options_yaml)

    print("\n>> load trainset...")
    trainset = datasets.factory_VQA(options['vqa']['trainsplit'],
                                    options['vqa'],
                                    options['coco'],
                                    options['vgenome'])

    print("\n>> load cnn model...")
    if dataset == "idrid":
        cnn = convnets_idrid.factory(
            {'arch': "resnet152_idrid"}, cuda=True, data_parallel=False)
    elif dataset == "tools":
        cnn = convnets_tools.factory(
            {'arch': "resnet152_tools"}, cuda=True, data_parallel=False)
    elif dataset == "breast":
        cnn = convnets_breast.factory(
            {'arch': "resnet152_breast"}, cuda=True, data_parallel=False)
    elif dataset == "vqa2":
        cnn = convnets.factory(
            {'arch': "fbresnet152"}, cuda=True, data_parallel=False)

    cnn = cnn.cuda()

    print("\n>> load vqa model...")
    model = load_vqa_model(args, dataset, args.vqa_model)
    model = model.cuda()

    return cnn, model, trainset
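A hedged usage sketch of initialize(), assuming `args` comes from an argparse parser that provides the dir_logs, path_opt, and vqa_model attributes the function and load_vqa_model() read:

# Hypothetical driver; the dataset value must be one handled above.
args = parser.parse_args()
cnn, model, trainset = initialize(args, dataset="vqa2")
cnn.eval()    # inference only: freeze dropout / batch-norm behavior
model.eval()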