Example #1
def get_learner_rnn_feedback(model_name):
    # Training settings
    bs = 1
    seq_len = 1

    # Datasets and Dataloaders
    trn_lds = LinearFeedbackDataset(df_small_trn, trn_tfms, PATH,
                                    IMAGES_FOLDER)
    val_lds = LinearFeedbackDataset(df_small_val, trn_tfms, PATH,
                                    IMAGES_FOLDER)
    trn_dl = FastaiDataLoader(trn_lds, batch_sampler=trn_lds.batch_sampler())
    val_dl = FastaiDataLoader(val_lds, batch_sampler=val_lds.batch_sampler())

    # Model
    model_folder = "CNNtoRNNFeedback"
    model = CNNtoRNNFeedback(1024,     # encode_size (presumed, by analogy with CNNtoRNN in Example #2)
                             200,      # hidden_size
                             2,        # num_layers
                             seq_len,
                             bs,
                             14,       # output_size
                             use_ground_truth=False)
    layer_groups = [
        list(model.encoder.children())[:6],
        list(model.encoder.children())[6:],
        [model.lstm, model.linear],
    ]

    # opt_fn is used like this: optimizer = opt_fn(trainable_params(model), lr=1e-1)
    opt_fn = partial(optim.SGD, momentum=0.9)
    criterion = F.l1_loss

    learner = Learner(
        MockedData(trn_dl, val_dl),
        CustomModel(model, layer_groups),
        metrics=METRICS,
        opt_fn=opt_fn,
        crit=criterion,
        tmp_name=os.path.join(ROOT, PATH, 'tmp'),
        models_name=os.path.join(ROOT, PATH, 'models', model_folder),
    )
    # clip and reg_fn shouldn't be passed to the constructor because it sets them to None anyway...
    # learner.reg_fn = partial(seq2seq_reg, alpha=2, beta=1)
    learner.clip = 0.4

    learner.load(model_name)
    learner.model.eval()
    return learner
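
A minimal inference sketch for the learner returned above; it is not part of the original example, the checkpoint name is hypothetical, and it assumes the dataloaders yield (input, target) tensor pairs, the model lives on the GPU, and MockedData exposes val_dl the way fastai's ModelData does:

import torch

learner = get_learner_rnn_feedback('my_feedback_checkpoint')  # hypothetical checkpoint name
with torch.no_grad():
    for x, y in learner.data.val_dl:      # assumes MockedData exposes val_dl like fastai's ModelData
        preds = learner.model(x.cuda())   # assumes (input, target) batches and a GPU-resident model
        print(preds.shape, y.shape)
        break                             # inspect a single batch only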
Example #2
def get_learner_rnn(model_name):
    # mem consumption: 7200 MB
    bs = 1
    seq_len = 1

    # Datasets and Dataloaders
    trn_ds = BatchifiedDataset(df_large_trn, bs, seq_len, trn_tfms, PATH,
                               IMAGES_FOLDER)
    val_ds = BatchifiedDataset(df_large_val, bs, seq_len, trn_tfms, PATH,
                               IMAGES_FOLDER)
    trn_dl = FastaiDataLoader(trn_ds, batch_sampler=trn_ds.batch_sampler())
    val_dl = FastaiDataLoader(val_ds, batch_sampler=val_ds.batch_sampler())

    # Model
    model_folder = "CNNtoRNN_new"
    model = CNNtoRNN(
        encode_size=128,  # 1024
        hidden_size=32,  # 200
        num_layers=2,
        bs=bs,
        output_size=14)
    layer_groups = [
        list(model.encoder.children())[:6],
        list(model.encoder.children())[6:],
        [model.encoder_linear, model.lstm, model.linear],
    ]

    # opt_fn is used like this: optimizer = opt_fn(trainable_params(model), lr=1e-1)
    opt_fn = partial(optim.SGD, momentum=0.9)
    criterion = F.mse_loss

    learner = Learner(
        MockedData(trn_dl, val_dl),
        CustomModel(model, layer_groups),
        metrics=METRICS,
        opt_fn=opt_fn,
        crit=criterion,
        tmp_name=os.path.join(ROOT, PATH, 'tmp'),
        models_name=os.path.join(ROOT, PATH, 'models', model_folder),
    )
    # clip and reg_fn shouldn't be passed to the constructor because it sets them to None anyway...
    # learner.reg_fn = partial(seq2seq_reg, alpha=2, beta=1)
    learner.clip = 0.4

    learner.load(model_name)
    learner.model.eval()
    return learner
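
The layer_groups above exist so the (fastai-0.7-style) Learner can freeze groups and apply discriminative learning rates. A hedged training sketch under that assumption, with a learner built as in get_learner_rnn but without the trailing load()/eval() calls; all learning rates and epoch counts are illustrative only:

import numpy as np

# `learner` is assumed to be constructed exactly as in get_learner_rnn above,
# minus the final learner.load(...) / learner.model.eval() calls.
lrs = np.array([1e-4, 1e-3, 1e-2])      # one learning rate per layer group (illustrative values)

learner.freeze_to(-1)                   # train only the last group (lstm + linear head) first
learner.fit(lrs, 1, cycle_len=6)        # e.g. 6 epochs frozen
learner.unfreeze()                      # then fine-tune the encoder groups as well
learner.fit(lrs / 10, 1, cycle_len=12)  # e.g. 12 epochs unfrozen
learner.save('6ep_frz_12ep_unfrz')      # name mirrors the checkpoint loaded in Example #3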
Example #3
learner = Learner(
    MockedData(trn_dl, val_dl),
    CustomModel(model, layer_groups),
    metrics=METRICS,
    opt_fn=opt_fn,
    crit=criterion,
    tmp_name=os.path.join(ROOT, PATH, 'tmp'),
    models_name=os.path.join(ROOT, PATH, 'models', model_folder),
)
# no clipping

############
############ Model loading
############

learner.load('6ep_frz_12ep_unfrz')

learner.model.eval()
print_calced_metrics_from_dl(learner.model, val_dl)
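
print_calced_metrics_from_dl is not shown in these examples; the following is a hypothetical sketch of what such a helper might do, assuming (input, target) tensor batches, a GPU-resident model, and that METRICS (defined elsewhere in the source) is a list of callables mapping (preds, targets) to a scalar:

import torch

def print_calced_metrics_from_dl(model, dl, metrics=METRICS):
    """Hypothetical sketch: average each metric over the dataloader and print it."""
    totals = [0.0] * len(metrics)
    n_batches = 0
    with torch.no_grad():
        for x, y in dl:                   # assumes (input, target) batches
            preds = model(x.cuda())       # assumes a GPU-resident model
            for i, metric in enumerate(metrics):
                totals[i] += float(metric(preds, y.cuda()))
            n_batches += 1
    for metric, total in zip(metrics, totals):
        name = getattr(metric, '__name__', str(metric))
        print(f'{name}: {total / n_batches:.4f}')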

######################
###################### Test driving
######################

# import pdb; pdb.set_trace()

def format_indicators(pred_indicators):
    """
    Indicators are formatted differently on the controller
    Cpp controller:
    indicators[0]  = shared->fast;