Example #1
File: run.py Project: LoSealL/srcnn
expt = Experiment(build_model=build_model,
                  optimizer=optimizer,
                  loss=loss,
                  lr_sub_size=param['lr_sub_size'],
                  lr_sub_stride=param['lr_sub_stride'],
                  random=random,
                  save_dir=Path('./results') / save_dir)

if args.export_only:
    expt.export_pb_model(['input_lr'], ['output_hr'],
                         Path('./results') / save_dir / 'model.pb', export_bgr)
    exit()

# Training
expt.train(train_set=param['train_set'],
           val_set=param['val_set'],
           epochs=param['epochs'],
           batch_size=param['batch'],
           resume=True)

# Evaluation
if 'test_sets' in param:
    for test_set in param['test_sets']:
        expt.test(test_set=test_set)
if 'test_files' in param:
    expt.test_file(param['test_files'])

# Export tensorflow .pb model
expt.export_pb_model(['input_lr'], ['output_hr'],
                     Path('./results') / save_dir / 'model.pb', export_bgr)
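
The exported model.pb is a frozen TensorFlow graph whose input and output nodes are named input_lr and output_hr. A minimal sketch of loading it for inference with the TensorFlow 1.x-style API (the ':0' tensor-name suffix and the input shape below are assumptions for illustration, not taken from the project):

import numpy as np
import tensorflow as tf

pb_path = './results/my_model/model.pb'  # illustrative; the script writes to ./results/<save_dir>/model.pb

graph_def = tf.compat.v1.GraphDef()
with open(pb_path, 'rb') as f:
    graph_def.ParseFromString(f.read())

with tf.Graph().as_default() as graph:
    tf.compat.v1.import_graph_def(graph_def, name='')
    input_lr = graph.get_tensor_by_name('input_lr:0')
    output_hr = graph.get_tensor_by_name('output_hr:0')

with tf.compat.v1.Session(graph=graph) as sess:
    lr_image = np.zeros((1, 32, 32, 1), dtype=np.float32)  # placeholder batch; the real shape depends on the model
    sr_image = sess.run(output_hr, feed_dict={input_lr: lr_image})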
Example #2
import argparse
import json
from functools import partial
from pathlib import Path

from keras import optimizers  # assumed: the toolbox builds its models with Keras

# Module paths below are assumed from the srcnn toolbox layout.
from toolbox.data import load_set
from toolbox.experiment import Experiment
from toolbox.models import get_model

parser = argparse.ArgumentParser()
parser.add_argument('param_file', type=Path)
args = parser.parse_args()
param = json.load(args.param_file.open())

# Model
scale = param['scale']
build_model = partial(get_model(param['model']['name']),
                      **param['model']['params'])
if 'optimizer' in param:
    optimizer = getattr(optimizers, param['optimizer']['name'].lower())
    optimizer = optimizer(**param['optimizer']['params'])
else:
    optimizer = 'adam'

# Data
load_set = partial(load_set,
                   lr_sub_size=param['lr_sub_size'],
                   lr_sub_stride=param['lr_sub_stride'])

# Training
expt = Experiment(scale=param['scale'], load_set=load_set,
                  build_model=build_model, optimizer=optimizer,
                  save_dir=param['save_dir'])
expt.train(train_set=param['train_set'], val_set=param['val_set'],
           epochs=param['epochs'], resume=True)

# Evaluation
for test_set in param['test_sets']:
    expt.test(test_set=test_set)
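
For reference, the parameter file in Example #2 only needs the keys the script reads. A minimal, illustrative sketch of its contents after json.load (the values are placeholders borrowed from Example #3, not taken from any particular project config):

param = {
    'scale': 3,
    'model': {'name': 'espcn', 'params': {'n1': 64, 'n2': 32}},
    'optimizer': {'name': 'Adam', 'params': {'lr': 1e-3}},  # optional; the script falls back to 'adam'
    'lr_sub_size': 11,
    'lr_sub_stride': 5,
    'save_dir': 'results/espcn-x3',
    'train_set': '91-image',
    'val_set': 'Set5',
    'epochs': 500,
    'test_sets': ['Set5', 'Set14'],
}

With such a file saved as JSON, the script is invoked as python run.py path/to/params.json.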
Example #3
File: run.py Project: zfang92/srcnn
from functools import partial

from toolbox.image import bicubic_resize
# Remaining imports assumed from the same toolbox package.
from toolbox.data import load_set
from toolbox.experiment import Experiment
from toolbox.models import espcn

# Model
scale = 3
model = espcn(c=1, f1=9, f2=1, f3=5, n1=64, n2=32)
model.summary()

# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
preprocess = partial(bicubic_resize, size=scale)
load_set = partial(load_set,
                   sub_size=11,
                   sub_stride=5,
                   scale=scale,
                   channel=0,
                   preprocess=preprocess)

# Training
experiment = Experiment(scale=scale,
                        model=model,
                        preprocess=preprocess,
                        load_set=load_set,
                        save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=500, resume=True)

# Evaluation
for test_set in test_sets:
    experiment.test(test_set=test_set)
Example #4
build_model = partial(get_model(param['model']['name']),
                      **param['model']['params'])
if 'optimizer' in param:
    optimizer = getattr(optimizers, param['optimizer']['name'].lower())
    optimizer = optimizer(**param['optimizer']['params'])
else:
    optimizer = 'adam'

lr_block_size = tuple(param['lr_block_size'])

# Data
load_train_set = partial(load_train_set,
                         lr_sub_size=param['lr_sub_size'],
                         lr_sub_stride=param['lr_sub_stride'])

# Training
expt = Experiment(scale=param['scale'],
                  load_set=load_train_set,
                  build_model=build_model,
                  optimizer=optimizer,
                  save_dir=param['save_dir'])
print('training process...')
expt.train(train_set=param['train_set'],
           val_set=param['val_set'],
           epochs=param['epochs'],
           resume=True)

# Evaluation
print('evaluation process...')
for test_set in param['test_sets']:
    expt.test(test_set=test_set, lr_block_size=lr_block_size)
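
Relative to Example #2, this script reads one additional key, lr_block_size, given as a JSON array and converted to a tuple before being passed to each expt.test call. An illustrative entry (the value is a placeholder, not from the project):

param['lr_block_size'] = [32, 32]  # tuple(...) turns this into (32, 32) for expt.test(..., lr_block_size=...)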