Example #1
# Assumes absl-style FLAGS plus ModelConfig and evaluate_model_dir defined
# elsewhere in the project.
import os

import numpy as np


def main():
    # Normalization statistics supplied on the command line.
    mean = FLAGS.mean
    std = FLAGS.std

    # Write a model config into the model directory if one does not exist yet.
    config_path = os.path.join(FLAGS.model_dir, "config.json")
    if not os.path.isfile(config_path):
        config = ModelConfig(
            model_name=FLAGS.model_name,
            arch=FLAGS.arch,
            model_n_out=FLAGS.model_n_out,
            sz=FLAGS.sz,
            N=FLAGS.N,
            mean=np.array(mean),
            std=np.array(std),
            meta={"model_file_prefix": FLAGS.model_file_prefix})
        config.toDir(FLAGS.model_dir)

    # Evaluate the models in model_dir against the training images and labels.
    evaluate_model_dir(FLAGS.model_dir,
                       sampler=None,
                       TRAIN=FLAGS.train_image_dir,
                       LABELS=FLAGS.train_csv)
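
The snippet reads everything from a global FLAGS object but does not show how it is defined. A minimal sketch of matching flag definitions, assuming the absl.flags library; the flag names come from the snippet, while the library choice and the default values are assumptions:

from absl import flags

# Hypothetical defaults; only the flag names are taken from Example #1.
flags.DEFINE_string("model_dir", "./models/baseline", "Directory for config.json and weights.")
flags.DEFINE_string("model_name", "baseline", "Human-readable model name.")
flags.DEFINE_string("arch", "resnet34", "Backbone architecture identifier.")
flags.DEFINE_integer("model_n_out", 28, "Number of model outputs (classes).")
flags.DEFINE_integer("sz", 512, "Input image size.")
flags.DEFINE_integer("N", 4, "Number of tiles/crops per image.")
flags.DEFINE_multi_float("mean", [0.485, 0.456, 0.406], "Per-channel normalization mean.")
flags.DEFINE_multi_float("std", [0.229, 0.224, 0.225], "Per-channel normalization std.")
flags.DEFINE_string("model_file_prefix", "fold_", "Prefix stored in the config metadata.")
flags.DEFINE_string("train_image_dir", "./data/train", "Directory with training images.")
flags.DEFINE_string("train_csv", "./data/train.csv", "CSV file with training labels.")

FLAGS = flags.FLAGS

With absl, main would normally be invoked through app.run and receive argv, so the zero-argument main above suggests either a thin wrapper or a different flags backend.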
Example #2
        # The opening of this per-fold training function (learner/model setup
        # and the matching `try:`) is not shown in this example.
        learn.split([model.head])
        learn.freeze()

        # Optional warm-up: train only the head for a single cycle.
        if is_warmup:
            learn.fit_one_cycle(1, lr_warmup)

    except Exception as e:
        print(e)

    # Unfreeze the whole model and run the main training schedule.
    learn.unfreeze()
    train_schedule(learn,
                   epochs,
                   max_lr,
                   cbs=[
                       SaveModelCallback(learn,
                                         name='model',
                                         monitor=monitor_metric), slack_cb
                   ],
                   is_oversampling=is_oversampling,
                   lr_sched=lr_sched)
    # Save only the state dict (weights); the model definition is required
    # again when loading them back.
    torch.save(learn.model.state_dict(), f'{fold}.pth')

# Gather the per-fold weight files into the save directory and persist the config.
move_models_2_savedir(SAVE_DIR,
                      models=[f'./{fold}.pth' for fold in range(n_folds)])
config.toDir(SAVE_DIR)

# Switch the sampler to evaluation mode and score the saved models.
sampler.is_train = False
evaluate_model_dir(SAVE_DIR, sampler=sampler)
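
The comment before torch.save points out that only the state dict is stored, so the same architecture has to be rebuilt before the weights can be loaded. A minimal loading sketch; build_model here is a hypothetical stand-in for whatever constructor recreates learn.model:

import torch

# Hypothetical: recreate the architecture that produced fold 0's weights,
# then load the saved state dict into it.
model = build_model()  # assumption: any constructor matching learn.model
state_dict = torch.load("0.pth", map_location="cpu")
model.load_state_dict(state_dict)
model.eval()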