Example #1
        tstloader = DataLoader(tstdataset,
                               batch_size=batch_size * 4,
                               shuffle=False,
                               num_workers=num_workers)

        # Extract embedding layer
        model.module.fc = Identity()
        model.eval()
        DATASETS = ['tst', 'val', 'trn']
        LOADERS = [tstloader, valloader, trnloader]
        for typ, loader in zip(DATASETS, LOADERS):
            ls = []
            for step, batch in enumerate(loader):
                if step % 1000 == 0:
                    logger.info('Embedding {} step {} of {}'.format(
                        typ, step, len(loader)))
                inputs = batch["image"]
                inputs = inputs.to(device, dtype=torch.float)
                out = model(inputs)
                ls.append(out.detach().cpu().numpy())
            outemb = np.concatenate(ls, 0).astype(np.float32)
            logger.info('Write embeddings : shape {} {}'.format(*outemb.shape))
            np.savez_compressed(
                os.path.join(
                    WORK_DIR, 'emb{}_{}_size{}_fold{}_ep{}'.format(
                        HFLIP + TRANSPOSE, typ, SIZE, fold, epoch)),
                outemb)
            dumpobj(
                os.path.join(
                    WORK_DIR, 'loader{}_{}_size{}_fold{}_ep{}'.format(
                        HFLIP + TRANSPOSE, typ, SIZE, fold, epoch)),
                loader)
            gc.collect()
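The example above strips the classification head by assigning an Identity module to model.module.fc, so the forward pass returns the pooled features instead of logits. The Identity class itself is defined elsewhere in the repository; a minimal sketch of such a pass-through module (the definition below is an assumption, not the original code) would be:

import torch.nn as nn

class Identity(nn.Module):
    # Pass-through layer used to replace a model's classification head.
    def forward(self, x):
        return x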
Example #2
        tstloader = DataLoader(tstdataset,
                               batch_size=batch_size * 4,
                               shuffle=False,
                               num_workers=num_workers)
        # Extract embedding layer
        model.module.fc = Identity()
        #model = torch.nn.DataParallel(model, device_ids=list(range(n_gpu)))
        model.eval()
        for typ, loader in zip(['tst', 'val', 'trn'],
                               [tstloader, valloader, trnloader]):
            ls = []
            for step, batch in enumerate(loader):
                if step % 1000 == 0:
                    logger.info('Embedding {} step {} of {}'.format(
                        typ, step, len(loader)))
                inputs = batch["image"]
                inputs = inputs.to(device, dtype=torch.float)
                out = model(inputs)
                ls.append(out.detach().cpu().numpy())
                #logger.info('Out shape {}'.format(out.shape))
                #logger.info('Final ls shape {}'.format(ls[-1].shape))
            outemb = np.concatenate(ls, 0)
            logger.info('Write embeddings : shape {} {}'.format(*outemb.shape))
            np.savez_compressed(
                'emb_{}_size{}_fold{}_ep{}'.format(typ, SIZE, fold, epoch),
                outemb)
            dumpobj(
                'loader_{}_size{}_fold{}_ep{}'.format(typ, SIZE, fold, epoch),
                loader)
            gc.collect()
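dumpobj is a project helper used here to persist the DataLoader objects; its definition is not shown in these examples. A plausible pickle-based sketch under that assumption (the function names match the calls above, but the implementation is assumed):

import pickle

def dumpobj(path, obj):
    # Serialize an arbitrary Python object to disk with pickle.
    with open(path, 'wb') as handle:
        pickle.dump(obj, handle, protocol=pickle.HIGHEST_PROTOCOL)

def loadobj(path):
    # Load an object previously written by dumpobj.
    with open(path, 'rb') as handle:
        return pickle.load(handle)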
Example #3
        model.eval()
        if STAGE2:
            DATASETS = ['tst2']
            LOADERS = [tst2loader]
        else:
            DATASETS = ['tst', 'val', 'trn']
            LOADERS = [tstloader, valloader, trnloader]

        for typ, loader in zip(DATASETS, LOADERS):
            ls = []
            for step, batch in enumerate(loader):
                if step % 1000 == 0:
                    logger.info('Embedding {} step {} of {}'.format(
                        typ, step, len(loader)))
                inputs = batch["image"]
                inputs = inputs.to(device, dtype=torch.float)
                out = model(inputs)
                ls.append(out.detach().cpu().numpy())
                #logger.info('Out shape {}'.format(out.shape))
                #logger.info('Final ls shape {}'.format(ls[-1].shape))
            outemb = np.concatenate(ls, 0).astype(np.float32)
            logger.info('Write embeddings : shape {} {}'.format(*outemb.shape))
            np.savez_compressed(
                'emb{}_{}_size{}_fold{}_ep{}'.format(HFLIP + TRANSPOSE, typ,
                                                     SIZE, fold, epoch),
                outemb)
            dumpobj(
                'loader{}_{}_size{}_fold{}_ep{}'.format(
                    HFLIP + TRANSPOSE, typ, SIZE, fold, epoch), loader)
            gc.collect()
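Because the embedding array is passed to np.savez_compressed positionally, NumPy stores it under the default key arr_0 and appends the .npz extension to the file name. A minimal sketch for reading one of these files back (the file name below is only a made-up example of the naming pattern used above):

import numpy as np

# Placeholder name following the 'emb{}_{}_size{}_fold{}_ep{}' pattern above.
with np.load('embT_tst_size480_fold0_ep5.npz') as data:
    emb = data['arr_0']  # positional arguments to savez are stored as arr_0, arr_1, ...
print(emb.shape, emb.dtype)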