# Example 1
                                                 discretizer,
                                                 normalizer,
                                                 args.batch_size,
                                                 shuffle=False)
else:
    # Set number of batches in one epoch
    train_nbatches = 2000
    val_nbatches = 1000
    if args.small_part:
        train_nbatches = 20
        val_nbatches = 20

    train_data_gen = utils.BatchGen(reader=train_reader,
                                    discretizer=discretizer,
                                    normalizer=normalizer,
                                    partition=args.partition,
                                    batch_size=args.batch_size,
                                    steps=train_nbatches,
                                    shuffle=True)
    val_data_gen = utils.BatchGen(reader=val_reader,
                                  discretizer=discretizer,
                                  normalizer=normalizer,
                                  partition=args.partition,
                                  batch_size=args.batch_size,
                                  steps=val_nbatches,
                                  shuffle=False)
if args.mode == 'train':
    # Prepare training
    # Checkpoint-path template under <output_dir>/keras_states/; Keras
    # substitutes {epoch} and {val_loss} when saving per-chunk model state.
    path = os.path.join(
        args.output_dir, 'keras_states/' + model.final_name +
        '.chunk{epoch}.test{val_loss}.state')
# Example 2
                                                   args.partition, discretizer,
                                                   normalizer, args.batch_size)
    # Validation generator for the deep-supervision pipeline.
    # NOTE(review): 'BatchGenDeepSupervisoin' looks like a misspelling of
    # 'BatchGenDeepSupervision' — confirm it matches the class name actually
    # defined in utils before renaming anything.
    val_data_gen = utils.BatchGenDeepSupervisoin(val_data_loader,
                                                 args.partition, discretizer,
                                                 normalizer, args.batch_size)
else:
    # Set number of batches in one epoch
    train_nbatches = 2000
    val_nbatches = 1000
    if (args.small_part):
        train_nbatches = 20
        val_nbatches = 20

    train_data_gen = utils.BatchGen(reader=train_reader,
                                    discretizer=discretizer,
                                    normalizer=normalizer,
                                    partition=args.partition,
                                    batch_size=args.batch_size,
                                    steps=train_nbatches)
    val_data_gen = utils.BatchGen(reader=val_reader,
                                  discretizer=discretizer,
                                  normalizer=normalizer,
                                  partition=args.partition,
                                  batch_size=args.batch_size,
                                  steps=val_nbatches)
    #val_data_gen.steps = val_reader.get_number_of_examples() // args.batch_size
    #train_data_gen.steps = train_reader.get_number_of_examples() // args.batch_size

if args.mode == 'train':

    # Prepare training
    # Relative checkpoint-path template; Keras substitutes {epoch} and
    # {val_loss} when saving per-chunk model state.
    path = 'keras_states/' + model.final_name + '.chunk{epoch}.test{val_loss}.state'