Ejemplo n.º 1
0
        def on_train_begin(self, **kwargs):
            """Log hyperparameters and a model summary to MLflow when training starts.

            Logs the layer count and optimizer function as params, mirrors any
            optimizer flags (``true_wd``, ``bn_wd``, ``train_bn``) that this
            fastai version exposes, and attaches the model summary both as a
            tag and as a ``model_summary.txt`` artifact.
            """
            info = layers_info(self.learner)
            try_mlflow_log(mlflow.log_param, "num_layers", len(info))
            try_mlflow_log(mlflow.log_param, "opt_func",
                           self.opt_func.func.__name__)

            # Optimizer flags vary across fastai versions, so each one is
            # guarded with hasattr before logging.
            if hasattr(self.opt, "true_wd"):
                try_mlflow_log(mlflow.log_param, "true_wd", self.opt.true_wd)

            if hasattr(self.opt, "bn_wd"):
                try_mlflow_log(mlflow.log_param, "bn_wd", self.opt.bn_wd)

            if hasattr(self.opt, "train_bn"):
                # FIX: the original logged self.train_bn although the guard
                # checks self.opt — log the attribute that was verified, as the
                # true_wd/bn_wd branches above do.
                try_mlflow_log(mlflow.log_param, "train_bn", self.opt.train_bn)

            summary = model_summary(self.learner)
            try_mlflow_log(mlflow.set_tag, "model_summary", summary)

            # The artifact API wants a file on disk, so write the summary to a
            # throwaway directory and always remove it, even if logging fails.
            tempdir = tempfile.mkdtemp()
            try:
                summary_file = os.path.join(tempdir, "model_summary.txt")
                with open(summary_file, "w") as f:
                    f.write(summary)
                try_mlflow_log(mlflow.log_artifact, local_path=summary_file)
            finally:
                shutil.rmtree(tempdir)
Ejemplo n.º 2
0
        def on_train_begin(self, **kwargs):
            """Log hyperparameters and a model summary to the tracker when training starts.

            Logs the layer count and optimizer function as params, mirrors any
            optimizer flags (``true_wd``, ``bn_wd``, ``train_bn``) that this
            fastai version exposes, and attaches the model summary both as a
            tag and as a ``model_summary.txt`` artifact.
            """
            info = layers_info(self.learner)
            try_mlflow_log(kiwi.log_param, 'num_layers', len(info))
            try_mlflow_log(kiwi.log_param, 'opt_func', self.opt_func.func.__name__)

            # Optimizer flags vary across fastai versions, so each one is
            # guarded with hasattr before logging.
            if hasattr(self.opt, 'true_wd'):
                try_mlflow_log(kiwi.log_param, 'true_wd', self.opt.true_wd)

            if hasattr(self.opt, 'bn_wd'):
                try_mlflow_log(kiwi.log_param, 'bn_wd', self.opt.bn_wd)

            if hasattr(self.opt, 'train_bn'):
                # FIX: the original logged self.train_bn although the guard
                # checks self.opt — log the attribute that was verified, as the
                # true_wd/bn_wd branches above do.
                try_mlflow_log(kiwi.log_param, 'train_bn', self.opt.train_bn)

            summary = model_summary(self.learner)
            try_mlflow_log(kiwi.set_tag, 'model_summary', summary)

            # The artifact API wants a file on disk, so write the summary to a
            # throwaway directory and always remove it, even if logging fails.
            tempdir = tempfile.mkdtemp()
            try:
                summary_file = os.path.join(tempdir, "model_summary.txt")
                with open(summary_file, 'w') as f:
                    f.write(summary)
                try_mlflow_log(kiwi.log_artifact, local_path=summary_file)
            finally:
                shutil.rmtree(tempdir)
Ejemplo n.º 3
0
                    cb_handler=cb_handler)

# %%

# lr_find(learner)

# %%

# learner.recorder.plot()

# %%

# Train for a single one-cycle schedule at a peak LR of 1e-2.
fit_one_cycle(learner, 1, max_lr=1e-2)

# %%

from utils.torch_utils import model_info

# Inspect the model and the learner's internals (bare expressions below are
# notebook-cell outputs, kept intentionally).
model_info(model)
learner.model
learner.layer_groups
learner.lr_range(slice(1e-6, 1e-4))
from fastai.callbacks import hooks
hooks.model_summary(learner)

# %%

# Unwrap DistributedDataParallel before saving so checkpoint keys are not
# prefixed with "module.". isinstance replaces the original `type(...) is`
# identity check — the idiomatic form, and it also covers DDP subclasses.
model_state = (
    model.module.state_dict()
    if isinstance(model, nn.parallel.DistributedDataParallel)
    else model.state_dict()
)
torch.save({'model': model_state}, "weights/fastai.pt")