def evaluate(self, child_pool):
    results = []
    for child in tqdm(child_pool, position=1, desc='Evaluate   ', leave=False):
        # Reuse cached metrics for configurations that were already evaluated.
        result = self.result_cache.get(encode_config(child))
        if result is None:
            result = {}
            fakes, names = [], []
            if isinstance(self.model, SPADEModel):
                self.model.calibrate(child)
            for i, data_i in enumerate(self.dataloader):
                self.model.set_input(data_i)
                self.model.test(child)
                fakes.append(self.model.fake_B.cpu())
                for path in self.model.get_image_paths():
                    short_path = ntpath.basename(path)
                    name = os.path.splitext(short_path)[0]
                    names.append(name)
            if self.inception_model is not None:
                result['fid'] = get_fid(fakes, self.inception_model, self.npz,
                                        self.device, self.opt.batch_size, tqdm_position=2)
            if self.drn_model is not None:
                result['mIoU'] = get_cityscapes_mIoU(fakes, names, self.drn_model, self.device,
                                                     data_dir=self.opt.cityscapes_path,
                                                     batch_size=self.opt.batch_size,
                                                     num_workers=self.opt.num_threads, tqdm_position=2)
            if self.deeplabv2_model is not None:
                torch.cuda.empty_cache()
                result['accu'], result['mIoU'] = get_coco_scores(fakes, names, self.deeplabv2_model, self.device,
                                                                 self.opt.dataroot, 1, num_workers=0, tqdm_position=2)
            if len(self.result_cache) < self.opt.max_cache_size:
                self.result_cache[encode_config(child)] = result
        results.append(result)
    return results
def random_sample(self):
    while True:
        sample = self.configs.sample(weighted_sample=self.opt.weighted_sample)
        # Only profile configurations whose MACs are not cached yet.
        macs = self.macs_cache.get(encode_config(sample))
        if macs is None:
            macs, _ = self.model.profile(sample, verbose=False)
            if len(self.macs_cache) < self.opt.max_cache_size:
                self.macs_cache[encode_config(sample)] = macs
        if macs <= self.opt.budget:
            return sample, macs
def crossover_sample(self, sample1, sample2):
    while True:
        new_sample = copy.deepcopy(sample1)
        # For each layer, inherit the channel number from either parent at random.
        for i in range(len(new_sample['channels'])):
            new_sample['channels'][i] = random.choice([sample1['channels'][i], sample2['channels'][i]])
        macs = self.macs_cache.get(encode_config(new_sample))
        if macs is None:
            macs, _ = self.model.profile(new_sample, verbose=False)
            if len(self.macs_cache) < self.opt.max_cache_size:
                self.macs_cache[encode_config(new_sample)] = macs
        if macs <= self.opt.budget:
            return new_sample, macs
def mutate_sample(self, sample):
    while True:
        new_sample = copy.deepcopy(sample)
        # Resample each layer's channel number with probability mutate_prob.
        for i in range(len(new_sample['channels'])):
            if random.random() < self.opt.mutate_prob:
                new_sample['channels'][i] = self.configs.sample_layer(i)
        macs = self.macs_cache.get(encode_config(new_sample))
        if macs is None:
            macs, _ = self.model.profile(new_sample, verbose=False)
            if len(self.macs_cache) < self.opt.max_cache_size:
                self.macs_cache[encode_config(new_sample)] = macs
        if macs <= self.opt.budget:
            return new_sample, macs
Example 5
def main(cfgs):
    fluid.enable_imperative() 
    if 'resnet' in cfgs.netG:
        from configs.resnet_configs import get_configs
    else:
        raise NotImplementedError
    configs = get_configs(config_name=cfgs.config_set)
    configs = list(configs.all_configs())

    data_loader, id2name = create_eval_data(cfgs, direction=cfgs.direction)
    model = TestModel(cfgs)
    model.setup()  ### load_network

    ### this input is used to compute model FLOPs and params
    for data in data_loader:
        model.set_input(data)
        break

    npz = np.load(cfgs.real_stat_path)
    results = []
    for config in configs:
        fakes, names = [], []
        flops, _ = model.profile(config=config)
        s_time = time.time()
        for i, data in enumerate(data_loader()):
            model.set_input(data)
            model.test(config)
            generated = model.fake_B
            fakes.append(generated.detach().numpy())
            name = id2name[i]
            save_path = os.path.join(cfgs.save_dir, 'test' + str(config))
            if not os.path.exists(save_path):
                os.makedirs(save_path)
            save_path = os.path.join(save_path, name)
            names.append(name)
            if i < cfgs.num_test:
               image = util.tensor2img(generated)
               util.save_image(image, save_path)

        result = {'config_str': encode_config(config), 'flops': flops}  ### record the config string and FLOPs

        fluid.disable_imperative()
        if not cfgs.no_fid:
            block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]
            inception_model = InceptionV3([block_idx])
            fid = get_fid(fakes, inception_model, npz, cfgs.inception_model_path, batch_size=cfgs.batch_size, use_gpu=cfgs.use_gpu)
            result['fid'] = fid
        fluid.enable_imperative() 

        e_time = (time.time() - s_time) / 60
        result['time'] = e_time
        print(result)
        results.append(result)

    if not os.path.exists(cfgs.save_dir):
        os.makedirs(cfgs.save_dir)
    save_file = os.path.join(cfgs.save_dir, 'search_result.pkl')
    with open(save_file, 'wb') as f:
        pickle.dump(results, f)
    print('Successfully finish searching!!!')
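
The pickled result list written above can be post-processed offline. The sketch below loads search_result.pkl and picks the lowest-FID configuration under a FLOPs budget; the file path and budget value are assumptions.

# Post-processing sketch: load the pickled search results and select the
# lowest-FID configuration under a hypothetical FLOPs budget.
import os
import pickle

with open(os.path.join('results', 'search_result.pkl'), 'rb') as f:  # assumed save_dir
    results = pickle.load(f)

budget = 5e9  # hypothetical FLOPs budget
candidates = [r for r in results if r['flops'] <= budget and 'fid' in r]
best = min(candidates, key=lambda r: r['fid'])
print(best['config_str'], best['flops'], best['fid'])
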
Example 6
def main(configs, opt, gpu_id, queue, verbose):
    opt.gpu_ids = [gpu_id]
    dataloader = create_dataloader(opt, verbose)
    model = create_model(opt, verbose)
    model.setup(opt, verbose)
    device = model.device
    if not opt.no_fid:
        block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]
        inception_model = InceptionV3([block_idx])
        inception_model.to(device)
        inception_model.eval()
    if 'cityscapes' in opt.dataroot and opt.direction == 'BtoA':
        drn_model = DRNSeg('drn_d_105', 19, pretrained=False)
        util.load_network(drn_model, opt.drn_path, verbose=False)
        if len(opt.gpu_ids) > 0:
            drn_model = nn.DataParallel(drn_model, opt.gpu_ids)
        drn_model.eval()

    npz = np.load(opt.real_stat_path)
    results = []
    for config in tqdm.tqdm(configs):
        fakes, names = [], []
        for i, data_i in enumerate(dataloader):
            model.set_input(data_i)
            if i == 0:
                macs, _ = model.profile(config)
            model.test(config)
            fakes.append(model.fake_B.cpu())
            for path in model.get_image_paths():
                short_path = ntpath.basename(path)
                name = os.path.splitext(short_path)[0]
                names.append(name)

        result = {'config_str': encode_config(config), 'macs': macs}
        if not opt.no_fid:
            fid = get_fid(fakes,
                          inception_model,
                          npz,
                          device,
                          opt.batch_size,
                          use_tqdm=False)
            result['fid'] = fid
        if 'cityscapes' in opt.dataroot and opt.direction == 'BtoA':
            mAP = get_mAP(fakes,
                          names,
                          drn_model,
                          device,
                          data_dir=opt.cityscapes_path,
                          batch_size=opt.batch_size,
                          num_workers=opt.num_threads,
                          use_tqdm=False)
            result['mAP'] = mAP
        print(result, flush=True)
        # print('Time Cost: %.2fmin' % ((time.time() - start_time) / 60), flush=True)
        results.append(result)
    queue.put(results)
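
main() above expects a multiprocessing queue and a single GPU id. The sketch below shows one possible parent-process launcher that shards the config list across GPUs; the sharding scheme and process setup are assumptions, not the repository's actual launcher.

# Hypothetical launcher: split the configs across GPUs, run main() in one worker
# process per GPU, and collect every worker's result list from the shared queue.
import multiprocessing as mp

def search_on_gpus(configs, opt, gpu_ids, verbose=False):
    ctx = mp.get_context('spawn')  # CUDA requires the 'spawn' start method
    queue = ctx.Queue()
    chunk = (len(configs) + len(gpu_ids) - 1) // len(gpu_ids)
    workers = []
    for k, gpu_id in enumerate(gpu_ids):
        shard = configs[k * chunk:(k + 1) * chunk]
        p = ctx.Process(target=main, args=(shard, opt, gpu_id, queue, verbose))
        p.start()
        workers.append(p)
    results = []
    for _ in workers:
        results += queue.get()  # drain before join() to avoid blocking on a full queue
    for p in workers:
        p.join()
    return results
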
Example 7
        else:
            qualified = True

        fakes, names = [], []

        if qualified:
            for i, data_i in enumerate(dataloader):
                model.set_input(data_i)
                model.test(config)
                fakes.append(model.fake_B.cpu())
                for path in model.get_image_paths():
                    short_path = ntpath.basename(path)
                    name = os.path.splitext(short_path)[0]
                    names.append(name)

        result = {'config_str': encode_config(config), 'macs': macs}
        if not opt.no_fid:
            if qualified:
                fid = get_fid(fakes,
                              inception_model,
                              npz,
                              device,
                              opt.batch_size,
                              use_tqdm=False)
                result['fid'] = fid
            else:
                result['fid'] = 1e9
        if 'cityscapes' in opt.dataroot and opt.direction == 'BtoA':
            if qualified:
                mIoU = get_mIoU(fakes,
                                names,
                                drn_model,
                                device,
                                data_dir=opt.cityscapes_path,
                                batch_size=opt.batch_size,
                                num_workers=opt.num_threads,
                                use_tqdm=False)
                result['mIoU'] = mIoU
            else:
                result['mIoU'] = 0

def tuple2item(info):
    result, config, macs = info
    ret = copy.deepcopy(result)
    ret['config_str'] = encode_config(config)
    ret['macs'] = macs
    return ret
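
A short usage note for tuple2item(); the metric values and channel list below are illustrative only.

# Illustrative call: tuple2item() flattens a (result, config, macs) tuple into one
# record keyed by 'config_str' and 'macs' alongside the original metrics.
info = ({'fid': 32.1, 'mIoU': 41.5}, {'channels': [16, 24, 24, 32]}, 5.6e9)
item = tuple2item(info)
# item -> {'fid': 32.1, 'mIoU': 41.5,
#          'config_str': encode_config({'channels': [16, 24, 24, 32]}), 'macs': 5.6e9}
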
Example 9
    device = model.device

    inception_model, drn_model, deeplabv2_model = create_metric_models(opt, device)
    npz = np.load(opt.real_stat_path)

    results, eval_configs = restore_results(opt)

    last_save_time = time.time()

    for data_i in dataloader:
        model.set_input(data_i)
        break

    configs_tqdm = tqdm.tqdm(configs, desc='Configs    ', position=0)
    for config in configs_tqdm:
        config_str = encode_config(config)
        if config_str in eval_configs:
            continue
        macs, _ = model.profile(config, verbose=False)
        result = {'config_str': config_str, 'macs': macs}
        qualified = (macs <= opt.budget)

        fakes, names = [], []
        if qualified:
            if isinstance(model, SPADEModel):
                model.calibrate(config)
            for i, data_i in enumerate(dataloader):
                model.set_input(data_i)
                model.test(config)
                fakes.append(model.fake_B.cpu())
                for path in model.get_image_paths():