Code example #1
0
    def _read_optimizer(config_data: Dict[str, Any]) -> Optimizer:
        """Build an optimizer from the 'optimizer' section of a parsed config.

        The section is expected to be a single-key mapping of the form
        ``{optimizer_name: optimizer_kwargs}``; the key selects the
        optimizer class and the value is passed to its ``from_config``.

        :param config_data: Parsed configuration containing an 'optimizer' key.
        :return: An optimizer instance built via ``from_config``.
        :raises AssertionError: If the name is not in
            ``Configuration.KNOWN_OPTIMIZERS`` (NOTE: asserts are stripped
            under ``python -O``; consider raising ``ValueError`` if callers
            rely on this check).
        :raises ValueError: If the name is "known" but has no branch below
            (previously this fell through and silently returned ``None``).
        """
        optimizer_config: Dict[str, Any] = config_data['optimizer']
        # The section holds exactly one entry; its key names the optimizer.
        # next(iter(...)) avoids materializing the full key list.
        optimizer_name = next(iter(optimizer_config))
        assert optimizer_name in Configuration.KNOWN_OPTIMIZERS, f'Unknown optimizer: {optimizer_name}'

        if optimizer_name == 'adam':
            return Adam.from_config(optimizer_config[optimizer_name])
        elif optimizer_name == 'sgd':
            return SGD.from_config(optimizer_config[optimizer_name])
        # Defensive: make the "known but unhandled" case an explicit error
        # instead of an implicit None return.
        raise ValueError(f'Unhandled optimizer: {optimizer_name}')
Code example #2
0
    # Normal-map head: kappa-parameterized loss, weighted by w_normals.
    # NOTE(review): these two lines sit inside a conditional whose header is
    # above this chunk (presumably `if use_nmap:`) — confirm against full file.
    ls['out_normals'] = SNMT.loss_nmap(conf['kappa'])
    lsw['out_normals'] = conf['w_normals']
if use_dmap:
    # Depth-map head and its loss weight.
    ls['out_depth_maps'] = SNMT.loss_dmap()
    lsw['out_depth_maps'] = conf['w_depth']
if use_pc:
    # Point-cloud (vertex) head and its loss weight.
    ls['out_verts'] = SNMT.loss_pcloud()
    lsw['out_verts'] = conf['w_coords']

# Optionally resume model weights from a saved state (by layer name, so
# partial/architecture-tolerant loading is allowed).
if args.model_state:
    snmt.model.load_weights(args.model_state, by_name=True)

if args.optim_state:
    # Resume training: restore the Adam optimizer's config and slot weights.
    # SECURITY NOTE(review): pickle.load executes arbitrary code from the
    # file — only load optimizer states from trusted sources.
    with open(args.optim_state, 'rb') as f:
        opt_state = pickle.load(f)
    optimizer = Adam.from_config(opt_state['config'])
    snmt.model.compile(optimizer, loss=ls, loss_weights=lsw)
    # Force-build the train function so the optimizer's weight variables
    # exist before set_weights is called (Keras internal API).
    snmt.model._make_train_function()
    optimizer.set_weights(opt_state['params'])
    # Starting epoch is parsed from the model-state filename.
    ep_start = helpers.extract_epoch(args.model_state)
else:
    # Fresh run: new Adam at the configured learning rate, start at epoch 1.
    optimizer = Adam(lr=conf['lr'])
    snmt.model.compile(optimizer, loss=ls, loss_weights=lsw)
    ep_start = 1

# LR scheduler, early stopping.
redlr = None
earlstop = None
if conf['red_lr_plateau']:
    # Reduce LR when the monitored validation loss plateaus.
    # (Call continues past this chunk.)
    redlr = ReduceLROnPlateau(monitor='loss_va',
                              factor=conf['red_lr_factor'],