Example #1
import yaml


def parse_args(parser):
    # Parse known args first to discover which pipeline module to load.
    args, _ = parser.parse_known_args()
    assert args.pipeline, 'set pipeline module'
    pipeline = get_module(args.pipeline)()
    pipeline.export_args(parser)   # the pipeline registers its own arguments

    # override defaults with values from the YAML config, if one was given
    if args.config:
        with open(args.config) as f:
            config = yaml.safe_load(f)   # plain key/value config, the safe loader is enough

        parser.set_defaults(**config)

    # return both the fully parsed args and the bare defaults (parsed from an empty argv)
    return parser.parse_args(), parser.parse_args([])
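The defaults-override trick above (YAML values pushed into parser.set_defaults, plus a second parse of an empty argv to recover the defaults) can be shown in isolation. A minimal standalone sketch, with made-up option names and an inline dict standing in for the loaded YAML config:

import argparse

# Standalone sketch of the defaults-override pattern; option names and values are illustrative.
parser = argparse.ArgumentParser()
parser.add_argument('--lr', type=float, default=1e-3)
parser.add_argument('--batch_size', type=int, default=8)

config = {'lr': 5e-4}          # would normally come from the YAML config file
parser.set_defaults(**config)  # config values become the new defaults

args = parser.parse_args(['--batch_size', '16'])   # explicit CLI flags still win
default_args = parser.parse_args([])               # defaults only, like the second return value

print(args.lr, args.batch_size)                  # 0.0005 16
print(default_args.lr, default_args.batch_size)  # 0.0005 8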
Example #2
import sys

import munch
import torch


def load_pipeline(checkpoint, args_to_update=None):
    # Load on CPU; the pipeline decides device placement later.
    ckpt = torch.load(checkpoint, map_location='cpu')

    assert 'args' in ckpt, 'checkpoint does not store training args'

    if args_to_update:
        ckpt['args'].update(args_to_update)

    try:
        # munchify gives attribute-style access to the stored args dict
        args = munch.munchify(ckpt['args'])

        pipeline = get_module(args.pipeline)()
        pipeline.create(args)
    except AttributeError:
        print('\nERROR: Checkpoint args is incompatible with this version\n',
              file=sys.stderr)
        raise   # re-raise with the original traceback

    load_model_checkpoint(checkpoint, pipeline.get_net())

    return pipeline, args
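A hedged usage sketch for load_pipeline: the checkpoint path and the override key below are illustrative only, and get_module / load_model_checkpoint are assumed to come from the surrounding project:

# Hypothetical call site; the path and the override key are placeholders.
pipeline, args = load_pipeline('checkpoints/scene.pth',
                               args_to_update={'inference': True})

net = pipeline.get_net()   # weights were already restored by load_model_checkpoint
net.eval()                 # assuming the net is a torch.nn.Module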
Example #3
            args.ignore_changed_args += ['config', 'pipeline']
        exper_name = get_experiment_name(args, default_args, args.ignore_changed_args)
        exper_dir = make_experiment_dir(args.save_dir)

        writer = SummaryWriter(log_dir=exper_dir, flush_secs=10)
        iter_cb = TrainIterCb(args, writer)

        setup_logging(exper_dir)

        print(f'experiment dir: {exper_dir}')

    print_args(args, default_args)

    args = eval_args(args)

    pipeline = get_module(args.pipeline)()
    pipeline.create(args)

    required_attributes = ['model', 'ds_train', 'ds_val', 'optimizer', 'criterion']
    check_pipeline_attributes(pipeline, required_attributes)

    # Halve the learning rate after 3 epochs without improvement in the monitored metric.
    # The commented-out variant handles pipeline.optimizer being a list of optimizers.
    # lr_scheduler = [torch.optim.lr_scheduler.ReduceLROnPlateau(o, patience=3, factor=0.5, verbose=True) for o in pipeline.optimizer]
    lr_scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(pipeline.optimizer, patience=3, factor=0.5, verbose=True)

    if args.net_ckpt:
        print(f'LOAD NET CHECKPOINT {args.net_ckpt}')
        load_model_checkpoint(args.net_ckpt, pipeline.get_net())
    
    if hasattr(pipeline.model, 'ray_block') and pipeline.model.ray_block is not None:
        if hasattr(args, 'ray_block_ckpt') and args.ray_block_ckpt:
            print(f'LOAD RAY BLOCK CHECKPOINT {args.ray_block_ckpt}')
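The ReduceLROnPlateau scheduler configured in Example #3 still has to be stepped with a validation metric each epoch. A standalone sketch of that pattern, with a made-up model, optimizer, and a constant validation loss so the reduction actually triggers:

import torch

# Toy model and optimizer purely to illustrate the scheduler pattern.
model = torch.nn.Linear(4, 1)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=3, factor=0.5)

for epoch in range(10):
    val_loss = 1.0                 # stand-in for the real validation criterion
    scheduler.step(val_loss)       # halves the LR after `patience` epochs without improvement
    print(epoch, optimizer.param_groups[0]['lr'])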