## print model summary
model.summary()

# Load model weights when resuming from a checkpoint. The number of already
# trained chunks is parsed from the checkpoint filename, which is expected to
# contain "epoch<N>" (e.g. "...epoch12...state"), so training can resume at
# the correct position.
n_trained_chunks = 0
if args.load_state != "":
    model.load_weights(args.load_state)
    # re.match returns None when the pattern does not match; fail loudly with
    # a clear message instead of an opaque AttributeError on .group(1).
    epoch_match = re.match(".*epoch([0-9]+).*", args.load_state)
    if epoch_match is None:
        raise ValueError(
            "Could not parse epoch number from load_state path: "
            "{}".format(args.load_state))
    n_trained_chunks = int(epoch_match.group(1))


# Build data generators for training and validation. Both splits share the
# same discretizer/normalizer pipeline and batching configuration; only the
# reader and the shuffle flag differ (shuffled for training, deterministic
# order for validation).
_batch_gen_kwargs = dict(discretizer=discretizer,
                         normalizer=normalizer,
                         ihm_pos=args_dict['ihm_pos'],
                         partition=args.partition,
                         target_repl=target_repl,
                         batch_size=args.batch_size,
                         small_part=args.small_part)
train_data_gen = utils.BatchGen(reader=train_reader, shuffle=True,
                                **_batch_gen_kwargs)
val_data_gen = utils.BatchGen(reader=val_reader, shuffle=False,
                              **_batch_gen_kwargs)

if args.mode == 'train':
# --- Example #2 (scrape boundary) ---
# NOTE: these snippets were extracted from two independent example scripts.
# The body of the `if args.mode == 'train':` branch above and the statement
# that assigns `normalizer_state` (consumed by load_params below) were lost
# during extraction; the remaining lines belong to the second example.
# Restore the input normalizer's saved parameters from the state file.
# (The assignment of `normalizer_state` is not visible in this chunk.)
normalizer.load_params(normalizer_state)

# Log the full run configuration and command-line arguments for
# reproducibility of this training/evaluation run.
# NOTE(review): tf.logging is the TensorFlow 1.x API (removed in TF2) —
# confirm the TF version this script targets.
tf.logging.info(str(vars(conf)))
tf.logging.info(str(args))

# Hyper-parameters arrive as strings (e.g. from a flag/argument dict);
# convert them once here.
number_epoch = int(args['number_epoch'])
batch_size = int(args['batch_size'])

if args['mode'] in ['train', 'eval']:
    # In eval mode always restrict to the small subset; in train mode defer
    # to the configuration flag.
    if args['mode'] == 'eval':
        sp = True
    else:
        sp = conf.small_part

    # Generator settings shared by the training and evaluation splits; only
    # the reader and the small_part flag differ between the two.
    _common_gen_kwargs = dict(discretizer=discretizer,
                              normalizer=normalizer,
                              batch_size=batch_size,
                              shuffle=True,
                              return_names=True,
                              ihm_pos=48,
                              partition='custom',
                              target_repl=False)
    train_data_gen = mt_utils.BatchGen(reader=train_reader,
                                       small_part=sp,
                                       **_common_gen_kwargs)
    eval_data_gen = mt_utils.BatchGen(reader=val_reader,
                                      small_part=conf.small_part,
                                      **_common_gen_kwargs)