Example #1
    def __init__(self):
        # Load the configuration file
        self.config = utils.loadYaml('../config/config.yaml')
        # Path where training results are saved
        self.output_model_path = os.path.join(
            '../output/', self.config['Misc']['OutputFolderName'])
        # GPU setup
        self.device = utils.set_gpu(self.config)
Example #2
    def __init__(self):
        # Load the configuration file
        self.config = utils.loadYaml('../config/config.yaml')
        # Path where training results are saved
        self.output_model_path = os.path.join(
            '../output/', self.config['Misc']['OutputFolderName'])
        if self.config['Misc']['OutputFolderName']:
            utils.mkdir(self.output_model_path)
        else:
            raise IOError('Please specify a path to save training results...')
        # GPU setup
        self.device = utils.set_gpu(self.config)
        self.summaryWriter = SummaryWriter(log_dir=self.output_model_path)

        # Logger setup
        setup_logger(self.output_model_path)
        self.logger = get_logger()
Example #3
    def __init__(self):
        # Load the configuration file
        self.config = utils.loadYaml('../config/config.yaml')
        # Path where training results are saved
        self.output_model_path = os.path.join('../output/', self.config['Misc']['OutputFolderName'])
        # GPU setup
        self.device = utils.set_gpu(self.config)
        self.img_shape = (256, 256)
        self.overlap_piexl = 200
        self.imgs_index_dict, self.rois_start_xy_index_dict, self.rois_xyxy_index_dict = self.getRefInfo(
            ref_img_path='/home/pi/Desktop/df1b_dataset/20191024/ref_deploy')
        print(self.imgs_index_dict)
        print("Creating model")
        self.model = getModels(model_name=self.config['Model']['Name'],
                               num_classes=self.config['Model']['NumClass']).to(self.device)

        checkpoint = torch.load(os.path.join(self.output_model_path, self.config['Misc']['BestModelName']),
                                map_location='cpu')
        self.model.load_state_dict(checkpoint['model'])
        self.model.eval()
Example #4
import numpy as np

if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument('--config', default="exps/exp-v1/config.yaml")

    args = parser.parse_args()
    pprint(vars(args))

    with open(args.config, 'r') as f:
        cfg = AttrDict(yaml.load(f))
        cfg = AttrDict(cfg.test)

    set_gpu(cfg.gpu)
    ensure_path(cfg.result)

    dataset = MiniImageNet(cfg.datapath, 'test')
    sampler = CategoriesSampler(dataset.label, cfg.batch, cfg.way,
                                cfg.shot + cfg.query)
    loader = DataLoader(dataset,
                        batch_sampler=sampler,
                        num_workers=8,
                        pin_memory=True)

    trainset = MiniImageNet(cfg.datapath, 'train')
    propagate_loader = DataLoader(dataset=trainset,
                                  batch_size=1280,
                                  shuffle=True,
                                  num_workers=24)
Example #5
from utils.utils import set_torch_seed, set_gpu, get_tasks, get_data, get_model, get_backbone, get_strategy
from utils.utils import compress_args, get_args, torch_summarize
from utils.builder import ExperimentBuilder
from utils.bunch import bunch
import sys
import pprint

if __name__ == '__main__':

    args, excluded_args, parser = get_args()
    args = bunch.bunchify(args)

    set_torch_seed(args.seed)
    device = set_gpu(args.gpu)

    datasets = get_data(args)
    tasks = get_tasks(args)
    backbone = get_backbone(args, device)
    strategy = get_strategy(args, device)
    model = get_model(backbone, tasks, datasets, strategy, args, device)

    compressed_args = compress_args(bunch.unbunchify(args), parser)
    print(" ----------------- FULL ARGS (COMPACT) ----------------")
    pprint.pprint(compressed_args, indent=2)
    print(" ------------------------------------------------------")
    print(" ------------------ UNRECOGNISED ARGS -----------------")
    pprint.pprint(excluded_args, indent=2)
    print(" ------------------------------------------------------")

    system = ExperimentBuilder(model, tasks, datasets, device, args)
    system.load_pretrained()
Example #6
    parser.add_argument('--inner_lr', type=float, default=1e-2)
    parser.add_argument('--inner_opt', type=str, default='SGD')
    parser.add_argument('--outer_lr', type=float, default=1e-3)
    parser.add_argument('--outer_opt', type=str, default='Adam')
    parser.add_argument('--lr_sched', type=lambda x: (str(x).lower() == 'true'), default=False)
    
    # imaml specific settings
    parser.add_argument('--lambda', type=float, default=2.0)
    parser.add_argument('--version', type=str, default='GD')
    parser.add_argument('--cg_steps', type=int, default=5) 
    
    # network settings
    parser.add_argument('--net', type=str, default='ConvNet')
    parser.add_argument('--n_conv', type=int, default=4)
    parser.add_argument('--n_dense', type=int, default=0)
    parser.add_argument('--hidden_dim', type=int, default=64)
    parser.add_argument('--in_channels', type=int, default=1)
    parser.add_argument('--hidden_channels', type=int, default=64,
        help='Number of channels for each convolutional layer (default: 64).')

    args = parser.parse_args()

    return args

if __name__ == '__main__':
    args = parse_args()
    set_seed(args.seed)
    set_gpu(args.device)
    check_dir(args)
    main(args)
Example #7
def setup_algorithms(server_args):
    """
    Load datasets and pretrained models
    """

    loaded_models = {}
    datasets = None
    abspath = os.path.abspath(".")
    set_torch_seed(server_args.seed)

    if "exp_path" in server_args and server_args["exp_path"] is not None:
        abspath = os.path.abspath(server_args["exp_path"])

    for builder_args in server_args.models:
        original_args = copy.copy(builder_args)

        assert 'continue_from' in builder_args, 'All "models" should have a "continue_from" entry.'
        assert 'gpu' in builder_args, 'All "models" should have a specified "gpu" entry or "cpu" device.'

        stdin_list = [
            "--args_file",
            os.path.join(abspath, builder_args["continue_from"], 'configs',
                         'config.json'), "--continue_from",
            os.path.join(abspath, builder_args["continue_from"]), "--gpu",
            builder_args['gpu'], "--seed", server_args.seed, "--dataset",
            server_args.dataset, "--dataset_args",
            json.dumps({
                'dataset_version': server_args.version,
                'data_path': server_args.data_path
            })
        ]

        builder_args, excluded_args, parser = get_args(stdin_list)
        builder_args = bunch.bunchify(builder_args)

        compressed_args = compress_args(bunch.unbunchify(builder_args), parser)

        device = set_gpu(builder_args.gpu)
        tasks = get_tasks(builder_args)
        datasets = get_data(builder_args) if datasets is None else datasets
        backbone = get_backbone(builder_args, device)
        strategy = get_strategy(builder_args, device)
        model = get_model(backbone, tasks, datasets, strategy, builder_args,
                          device)

        compressed_args = compress_args(bunch.unbunchify(builder_args), parser)
        print(" ----------------- FULL ARGS (COMPACT) ----------------")
        pprint.pprint(compressed_args, indent=2)
        print(" ------------------------------------------------------")
        print(" ------------------ UNRECOGNISED ARGS -----------------")
        pprint.pprint(excluded_args, indent=2)
        print(" ------------------------------------------------------")

        system = ExperimentBuilder(model, tasks, datasets, device,
                                   builder_args)
        system.load_pretrained()

        model.set_mode('test')

        if builder_args["model"] == 'simpleshot':
            system.model.set_train_mean(system.datasets['train'])

        name = original_args[
            'name'] if 'name' in original_args else builder_args['model']
        tie_breaker = 0
        name_proposal = name
        while name_proposal in loaded_models:
            tie_breaker += 1
            name_proposal = "{}({})".format(name, tie_breaker)
        loaded_models[name_proposal] = system

    return loaded_models, datasets
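
All seven examples call a project-local set_gpu helper (imported from each repository's own utils module) right after reading the configuration or command-line arguments, but none of them shows its body. The sketch below is a minimal illustration of what such a helper commonly does, namely restricting CUDA_VISIBLE_DEVICES and returning a torch.device; the dict handling and the 'Misc'/'GpuId' key are assumptions made for illustration, not the implementation used by any of these repositories.

import os
import torch

def set_gpu(gpu):
    """Select the GPU(s) to use and return the corresponding torch.device.

    `gpu` may be a device id string such as '0' or '0,1' (Examples #4-#7),
    or a config dict as in Examples #1-#3. The 'Misc'/'GpuId' lookup below
    is a hypothetical config layout, not the schema of those projects.
    """
    if isinstance(gpu, dict):
        gpu = str(gpu.get('Misc', {}).get('GpuId', '0'))
    # Restrict which physical GPUs are visible to this process.
    os.environ['CUDA_VISIBLE_DEVICES'] = str(gpu)
    if torch.cuda.is_available():
        return torch.device('cuda')
    return torch.device('cpu')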