name = 'new_baseline_hengs_LessAugs_211_Mu10_Wd0_fit160epochs'

logger = CSVLogger(learn, name)

# learn.unfreeze()


# In[ ]:


# learn.fit_one_cycle(
#     64,
#     max_lr=.01,
#     wd=0.,
#     pct_start=0.0,
#     div_factor=100,
#     callbacks=[logger, SaveModelCallback(learn, monitor='metric_tot', mode='max', name=name), MixUpCallback(learn, alpha=1.)]
# )

learn.fit(
    160,
    lr=.05,
    wd=0.,
    callbacks=[
        logger, 
        SaveModelCallback(learn, monitor='metric_tot', mode='max', name=name), 
        # MixUpCallback(learn, alpha=1.),
        ReduceLROnPlateauCallback(learn, patience=5, factor=0.5, min_lr=.0001)
    ]
)
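
# The metric_tot being monitored here comes from the Metric_* callbacks defined earlier in
# this kernel (and passed to the Learner constructions below). Purely to illustrate the
# fastai-v1 Callback-metric API they rely on, here is a minimal, hypothetical sketch of a
# combined macro-recall metric. It assumes the model returns a 3-tuple of logits
# (grapheme, vowel, consonant) and that each target is a (bs, 3) label tensor; the 2/1/1
# weighting mirrors the Bengali.AI competition metric. This is not the kernel's actual
# implementation.
import torch
import numpy as np
from sklearn.metrics import recall_score
from fastai.callback import Callback

class MetricTotSketch(Callback):
    def on_epoch_begin(self, **kwargs):
        self.preds, self.targs = [], []

    def on_batch_end(self, last_output, last_target, **kwargs):
        # last_output: tuple of three logit tensors; last_target: (bs, 3) labels
        self.preds.append(torch.stack([o.argmax(-1) for o in last_output], dim=1).cpu())
        self.targs.append(last_target.cpu())

    def on_epoch_end(self, last_metrics, **kwargs):
        preds, targs = torch.cat(self.preds), torch.cat(self.targs)
        recalls = [recall_score(targs[:, i], preds[:, i], average='macro') for i in range(3)]
        tot = float(np.average(recalls, weights=[2, 1, 1]))
        # equivalent to fastai's add_metrics(last_metrics, tot)
        return {'last_metrics': last_metrics + [tot]}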

# ==================================================================

classifier = Seresnext50MishFrac()

logging_name = 'new_baseline_seresnext50_Mish_Frac'

learn = Learner(
    data_bunch,
    classifier,
    loss_func=Loss_combine_weighted_v2(),
    opt_func=Over9000,
    metrics=[Metric_grapheme(), Metric_vowel(), Metric_consonant(), Metric_tot()]
)
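
# Loss_combine_weighted_v2 is defined earlier in the kernel. As a rough, hypothetical
# sketch of the usual pattern behind such combined losses (not the author's exact
# implementation): a weighted sum of three cross-entropies over the (grapheme, vowel,
# consonant) heads, with the grapheme-root term weighted highest. The class name and
# the weights below are illustrative assumptions.
import torch.nn as nn
import torch.nn.functional as F

class LossCombineWeightedSketch(nn.Module):
    def __init__(self, weights=(0.7, 0.2, 0.1)):
        super().__init__()
        self.weights = weights

    def forward(self, preds, target, reduction='mean'):
        g, v, c = preds                      # 3-tuple of logits from the three heads
        w_g, w_v, w_c = self.weights
        # target is assumed to hold integer class ids, one column per head
        return (w_g * F.cross_entropy(g, target[:, 0], reduction=reduction)
                + w_v * F.cross_entropy(v, target[:, 1], reduction=reduction)
                + w_c * F.cross_entropy(c, target[:, 2], reduction=reduction))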

logger = CSVLogger(learn, logging_name)

learn.clip_grad(1.)  # attach fastai's GradientClipping callback (a plain attribute assignment would be a no-op)

# ==================================================================

learn.fit_one_cycle(
    120,
    max_lr=1e-2,
    wd=0.,
    pct_start=0.0,
    div_factor=100,
    callbacks=[logger, SaveModelCallback(learn, monitor='metric_tot', mode='max', name=logging_name), MixUpCallback(learn, alpha=1)]
)

# Example #3
# =========================================================================================

classifier = mdl_ResDenHybrid()

# SGD with momentum fixed at 0.5, wrapped so it can be passed as the Learner's opt_func.
class SGD_m5(SGD):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, momentum=0.5, **kwargs)
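
# An equivalent, arguably more idiomatic way to fix the momentum (assuming SGD here is
# torch.optim.SGD or fastai's re-export of it) would be a partial instead of a subclass:
# from functools import partial
# SGD_m5 = partial(SGD, momentum=0.5)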

learn = Learner(
    data_bunch,
    classifier,
    loss_func=Loss_combine_weighted_v2(),
    opt_func=SGD_m5,
    metrics=[Metric_grapheme(), Metric_vowel(), Metric_consonant(), Metric_tot()]
)

name = 'mdl_ResDenHybrid_sgd_lessaugs_mucm_fixed_raw_onecycle_fld1of5'

logger = CSVLogger(learn, name)

# =========================================================================================

learn.fit_one_cycle(
    160,
    max_lr=0.05,
    wd=0.0,
    pct_start=0.0,
    div_factor=50.,
    final_div=100.,
    callbacks=[logger, SaveModelCallback(learn, monitor='metric_tot', mode='max', name=name), MuCmCallback(learn)]
)

# Example #4

learn = Learner(
    data_bunch,
    classifier,
    #loss_func=Loss_single(),
    loss_func=AdvancedLoss_Single(),
    opt_func=Over9000,
    metrics=[Metric_grapheme()]
)

logger = CSVLogger(learn, logging_name)

learn.clip_grad(1.)  # gradient clipping, as in the earlier examples
# learn.split([classifier.cls])
learn.unfreeze()


# In[9]:


learn.fit_one_cycle(
    64,
#     max_lr=slice(0.2e-2, 1e-2),
    max_lr=1e-2,
    wd=0.,
    pct_start=0.0,
    div_factor=100,
    callbacks=[logger, SaveModelCallback(learn, monitor='metric_idx', mode='max', name=logging_name)]
)