Example #1
def load_experiment(experiment_dir: Path = None, flags=None, _id: str = None, epoch=None, add_args=None):
    """Load experiment with flags and trained model from old experiment."""
    if add_args is None:
        add_args = {}

    if not flags:
        dataset = _id.split('_')[0].lower()
        flags_setup = BaseFlagsSetup(get_config_path(dataset=dataset))
        flags_path = experiment_dir / 'flags.rar' if experiment_dir is not None else None
        if flags_path is not None and flags_path.exists():
            flags = flags_setup.load_old_flags(flags_path, add_args={'save_figure': False, **add_args})
        else:
            flags = flags_setup.load_old_flags(_id=_id, add_args={'save_figure': False, **add_args})
    exp = get_experiment(flags)

    if experiment_dir and (experiment_dir / 'checkpoints').exists():
        latest_checkpoint = epoch if epoch else max(
            int(d.name) for d in (experiment_dir / 'checkpoints').iterdir() if d.name.isdigit())

        print(f'loading checkpoint from epoch {latest_checkpoint}.')

        latest_checkpoint_path = experiment_dir / 'checkpoints' / str(latest_checkpoint).zfill(4)
        exp.mm_vae.load_networks(latest_checkpoint_path)
    else:
        # load networks from database
        exp.mm_vae = exp.experiments_database.load_networks_from_db(exp.mm_vae)

    return exp
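
A hedged usage sketch; the run directory and experiment id below are hypothetical stand-ins:

# load from a local run directory, picking the latest checkpoint
exp = load_experiment(experiment_dir=Path('~/experiments/my_run').expanduser())
# or resolve flags and trained networks through the database by id
exp = load_experiment(_id='polymnist_2021_01_01_12_00_00')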
Example #2
def compress_experiment_run_dir(flags) -> None:
    """
    Move zipped experiment_dir_run in TMPDIR to experiment_dir.
    """
    dir_experiment = Path(
        json2dict(get_config_path(flags=flags))['dir_experiment']).expanduser()
    dir_experiment.mkdir(exist_ok=True)

    # zip dir_experiment_run
    log.info(
        f'zipping {flags.dir_experiment_run} '
        f'to {(dir_experiment / flags.experiment_uid).with_suffix(".zip")}.'
    )
    dir_experiment_zipped = dir_experiment / flags.experiment_uid

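    # make_archive appends the '.zip' suffix to base_name itself,
    # hence dir_experiment_zipped is built without a suffix here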
    shutil.make_archive(dir_experiment_zipped,
                        'zip',
                        flags.dir_experiment_run,
                        verbose=True)

    assert dir_experiment_zipped.with_suffix('.zip').exists(), \
        f'{dir_experiment_zipped.with_suffix(".zip")} does not exist. Zipping of dir_experiment_run failed.'
    # delete not compressed experiment dir
    shutil.rmtree(str(flags.dir_experiment_run))
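
A hedged usage sketch; the flags object is a hypothetical stand-in, and get_config_path may require more attributes than the two shown:

from types import SimpleNamespace

# hypothetical flags carrying only the attributes used above
flags = SimpleNamespace(dir_experiment_run='/tmp/exp_run',
                        experiment_uid='polymnist_2021_01_01_12_00_00')
compress_experiment_run_dir(flags)  # leaves <dir_experiment>/<uid>.zip behind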
Example #3
def load_experiment_from_db(_id: str, add_args=None):
    """Load experiment with flags and trained model from old experiment."""

    if add_args is None:
        add_args = {}

    dataset = _id.split('_')[0].lower()
    flags_setup = BaseFlagsSetup(get_config_path(dataset=dataset))
    flags = flags_setup.load_old_flags(_id=_id, add_args={'save_figure': False, **add_args})
    exp = get_experiment(flags)

    # load networks from database
    exp.mm_vae = exp.experiments_database.load_networks_from_db(exp.mm_vae)

    return exp
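
The _id.split('_')[0] convention implies that experiment ids are prefixed with their dataset name; a hypothetical call:

# the 'polymnist' prefix selects which config file is loaded
exp = load_experiment_from_db(_id='polymnist_2021_01_01_12_00_00')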
Example #4
from norby.utils import maybe_norby

from mmvae_hub import log
from mmvae_hub.celeba.CelebaTrainer import CelebaTrainer
from mmvae_hub.celeba.experiment import CelebaExperiment
from mmvae_hub.celeba.flags import parser, CelebaFlagsSetup
from mmvae_hub.leomed_utils.boilerplate import compress_experiment_run_dir

from mmvae_hub.utils.setup.flags_utils import get_config_path

DATASET = 'celeba'

if __name__ == '__main__':

    flags = parser.parse_args()
    flags_setup = CelebaFlagsSetup(
        get_config_path(dataset=DATASET, flags=flags))
    flags = flags_setup.setup(flags, additional_args={'dataset': DATASET})

    with maybe_norby(flags.norby,
                     f'Starting Experiment {flags.experiment_uid}.',
                     f'Experiment {flags.experiment_uid} finished.'):
        mst = CelebaExperiment(flags)
        mst.set_optimizer()
        trainer = CelebaTrainer(mst)
        trainer.run_epochs()

    log.info('Done.')
    # move zipped experiment_dir_run in TMPDIR to experiment_dir
    if flags.leomed:
        compress_experiment_run_dir(flags)
Example #5
        return {_class: i for i, _class in enumerate(self.classes)}

    def _check_exists_mnist(self):
        return (Path(self.processed_folder) / self.training_file_mnist).exists() and (
                Path(self.processed_folder) / self.test_file_mnist).exists()

    def _check_exists_svhn(self):
        return (os.path.exists(os.path.join(self.dir_svhn,
                                            self.training_file_svhn)) and
                os.path.exists(os.path.join(self.dir_svhn,
                                            self.test_file_svhn)))

    @staticmethod
    def extract_gzip(gzip_path, remove_finished=False):
        print('Extracting {}'.format(gzip_path))
        with open(gzip_path.replace('.gz', ''), 'wb') as out_f, \
                gzip.GzipFile(gzip_path) as zip_f:
            out_f.write(zip_f.read())
        if remove_finished:
            os.unlink(gzip_path)

    def extra_repr(self):
        return "Split: {}".format("Train" if self.train is True else "Test")


if __name__ == '__main__':
    config = json2dict(Path(get_config_path(dataset='mnistsvhntext')))
    download_zip_from_url(
        url='https://www.dropbox.com/sh/lx8669lyok9ois6/AADMhr3EluBXJyZnV1_lYntTa/data_mnistsvhntext.zip?dl=1',
        dest_folder=Path(config['dir_data']).expanduser().parent, verbose=True)
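
The extract_gzip staticmethod above reads the whole archive into memory at once; a hedged standalone sketch of the same extraction using shutil.copyfileobj to stream instead:

import gzip
import os
import shutil

def extract_gzip_sketch(gzip_path: str, remove_finished: bool = False) -> None:
    # write foo from foo.gz next to the archive, streaming chunk by chunk
    out_path = gzip_path[:-3] if gzip_path.endswith('.gz') else gzip_path + '.out'
    with gzip.open(gzip_path, 'rb') as zip_f, open(out_path, 'wb') as out_f:
        shutil.copyfileobj(zip_f, out_f)
    if remove_finished:
        os.unlink(gzip_path)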
Example #6

if __name__ == "__main__":
    # parser = argparse.ArgumentParser()
    # parser.add_argument('--seed', type=int, default=42)
    # parser.add_argument('--num-modalities', type=int, default=5)
    # parser.add_argument('--savepath-train', type=str, required=True)
    # parser.add_argument('--savepath-test', type=str, required=True)
    # parser.add_argument('--backgroundimagepath', type=str, required=True)
    # args = parser.parse_args()  # use vars to convert args into a dict
    # print("\nARGS:\n", args)
    from dataclasses import dataclass
    from pathlib import Path


    config = json2dict(get_config_path())


    @dataclass
    class Args:
        savepath_train: Path = Path(config['dir_data']) / 'train'
        savepath_test: Path = Path(config['dir_data']) / 'test'
        backgroundimagepath: Path = Path(__file__).parent / 'polymnist_background_images'
        num_modalities: int = 5


    args = Args()
    # create dataset
    PolymnistDataset.create_polymnist_dataset(args.savepath_train, args.backgroundimagepath, args.num_modalities,
                                              train=True)
    PolymnistDataset.create_polymnist_dataset(args.savepath_test, args.backgroundimagepath, args.num_modalities,
                                              train=False)
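
The @dataclass above simply freezes the defaults that the commented-out argparse parser would otherwise have collected; Args exposes the same attribute names (savepath_train, savepath_test, backgroundimagepath, num_modalities), so the create_polymnist_dataset calls work unchanged with either variant.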
Example #7
    "dataloader_workers": [16],
    "max_beta": [0.],
    "beta_start_epoch": [0.],
    "beta_warmup": [50],
    # "num_gfm_flows": [3],
    # "coupling_dim": [32],
    "coupling_dim": [64],
    "num_gfm_flows": [3],
    "nbr_coupling_block_layers": [8],
    "end_epoch": [150],
    "calc_nll": [False],
    "K": [1],
    "eval_freq": [150],
}
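
For reference, sklearn's ParameterGrid (used below) expands these list-valued entries into the Cartesian product of settings, which is why even single candidates are wrapped in lists. A minimal sketch:

from sklearn.model_selection import ParameterGrid

# each combination is yielded as a plain dict of scalars
for sp in ParameterGrid({'max_beta': [0., 2.], 'K': [1]}):
    print(sp)  # {'K': 1, 'max_beta': 0.0}, then {'K': 1, 'max_beta': 2.0}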

if __name__ == '__main__':

    for grid in [sp_mopoe_mimic]:
        for sp in ParameterGrid(grid):
            # for _ in [1]:
            flags = parser.parse_args()
            flags_setup = MimicFlagsSetup(
                get_config_path(dataset='mimic', flags=flags))
            flags = flags_setup.setup(flags, additional_args=sp)
            with norby(f'Starting Experiment {flags.experiment_uid}.',
                       f'Experiment {flags.experiment_uid} finished.'):
                mst = MimicExperiment(flags)
                mst.set_optimizer()
                trainer = MimicTrainer(mst)
                trainer.run_epochs()
Example #8
        for l, line in enumerate(lines):
            width, height = font.getsize(line)
            draw.text((0, (h / 2) - (num_lines / 2 - l) * height),
                      line, (0, 0, 0),
                      font=font)
            y_text += height
        if imgsize[0] == 3:
            # Image.ANTIALIAS was removed in Pillow 10; Image.LANCZOS is the same filter
            text_pil = transforms.ToTensor()(pil_img.resize(
                (imgsize[1], imgsize[2]), Image.LANCZOS))
        else:
            text_pil = transforms.ToTensor()(pil_img.resize(
                (imgsize[1], imgsize[2]), Image.LANCZOS).convert('L'))
        return text_pil


if __name__ == '__main__':

    config = json2dict(get_config_path(dataset='celeba'))

    text_clf_path = Path(config['dir_clf']).expanduser() / 'clf_celeba_text.pth'
    text_clf_path.parent.mkdir(exist_ok=True, parents=True)
    if not text_clf_path.exists():
        print(
            f'text clf not found under {text_clf_path}. Parent folder contains: {list(text_clf_path.parent.iterdir())}'
        )
        download_from_url(
            url='https://www.dropbox.com/sh/lx8669lyok9ois6/AACaBy1YNNq3ebh149k_EXrca/trained_classifiers/trained_clfs_celeba/clf_m2?dl=1',
            dest_path=text_clf_path,
            verbose=True)
Example #9
    # "factorized_representation": [True],
    # "beta": [5.],
    # "beta_style": [2.0],
    # "beta_content": [1.0],
    # "beta_m1_style": [1.0],
    # "beta_m2_style": [5.0],
    # "style_img_dim": [32],
    # "style_text_dim": [32],
}

if __name__ == '__main__':
    dataset = 'celeba'

    for grid in [sp_joint_elbo_article]:
        for sp in ParameterGrid(grid):
            # for _ in [1]:
            flags = parser.parse_args()
            flags_setup = CelebaFlagsSetup(
                get_config_path(dataset=dataset, flags=flags))
            flags = flags_setup.setup(flags,
                                      additional_args={
                                          **sp, 'dataset': dataset
                                      })

            with norby(f'Starting Experiment {flags.experiment_uid}.',
                       f'Experiment {flags.experiment_uid} finished.'):
                mst = CelebaExperiment(flags)
                mst.set_optimizer()
                trainer = CelebaTrainer(mst)
                trainer.run_epochs()
Example #10
from norby.utils import maybe_norby

from mmvae_hub.leomed_utils.boilerplate import compress_experiment_run_dir
from mmvae_hub.mimic.MimicTrainer import MimicTrainer
from mmvae_hub.mimic.experiment import MimicExperiment
from mmvae_hub.mimic.flags import parser, MimicFlagsSetup
from mmvae_hub.utils.setup.flags_utils import get_config_path

DATASET = 'mimic'

if __name__ == '__main__':

    flags = parser.parse_args()
    flags.dataset = DATASET
    flags_setup = MimicFlagsSetup(get_config_path(dataset=DATASET,
                                                  flags=flags))
    flags = flags_setup.setup(flags, additional_args={'dataset': DATASET})

    with maybe_norby(flags.norby,
                     f'Starting Experiment {flags.experiment_uid}.',
                     f'Experiment {flags.experiment_uid} finished.'):
        exp = MimicExperiment(flags)
        exp.set_optimizer()
        trainer = MimicTrainer(exp)
        trainer.run_epochs()

    # move zipped experiment_dir_run in TMPDIR to experiment_dir
    if flags.leomed:
        compress_experiment_run_dir(flags)
Example #11
    method = 'mopgfm'
    flags = parser.parse_args()

    study_name = f'hyperopt-{method}2'

    # storage_sqlite = optuna.storages.RDBStorage("sqlite:///hyperopt.db", heartbeat_interval=1)
    # study = optuna.create_study(direction="maximize", storage=storage_sqlite,
    #                             study_name=f"distributed-hyperopt-{flags.method}")

    postgresql_storage_address = "postgresql://klugh@ethsec-login-03:5433/distributed_hyperopt"

    try:
        study = optuna.load_study(study_name=study_name,
                                  storage=postgresql_storage_address)
    except KeyError:  # the study does not exist yet in this storage
        study = optuna.create_study(direction="maximize",
                                    storage=postgresql_storage_address,
                                    study_name=study_name)

    flags.dir_experiment = Path(flags.dir_experiment) / 'optuna'
    flags_setup = FlagsSetup(get_config_path(dataset=dataset, flags=flags))
    trainer = HyperoptTrainer(flags,
                              flags_setup,
                              dataset=dataset,
                              method=method)
    study.optimize(trainer.hyperopt, n_trials=100, gc_after_trial=True)
    print("Best trial:")
    print(study.best_params)
    with open('hyperopt_best_results.json', 'w') as jsonfile:
        json.dump(study.best_params, jsonfile)
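
The try/except load-or-create sequence above can also be written with optuna's built-in flag; a sketch assuming the same storage address and study name:

# equivalent: load the study if it already exists, otherwise create it
study = optuna.create_study(direction="maximize",
                            storage=postgresql_storage_address,
                            study_name=study_name,
                            load_if_exists=True)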
Example #12
search_space1 = {
    'method': 'mopoe',
    "initial_learning_rate": 0.0005,
    'class_dim': 640,
    "min_beta": 0,
    "max_beta": 2.0,
    "beta_warmup": 0,
    "num_gfm_flows": 3,
    "num_mods": 3,
    "end_epoch": 10,
    "eval_freq": 10,
    "data_multiplications": 1
}

if __name__ == '__main__':

    for grid in [search_spaces_amortized]:
        for sp in ParameterGrid(grid):
            # for sp in [grid]:
            # for _ in [1]:
            flags = parser.parse_args()
            flags_setup = mnistsvhntextFlagsSetup(
                get_config_path(dataset='mnistsvhntext', flags=flags))
            flags = flags_setup.setup(flags, additional_args=sp)
            with norby(f'Starting Experiment {flags.experiment_uid}.',
                       f'Experiment {flags.experiment_uid} finished.'):
                mst = MNISTSVHNText(flags)
                mst.set_optimizer()
                trainer = mnistsvhnTrainer(mst)
                trainer.run_epochs()
Example #13
            #         (experiment_dir / 'checkpoints').iterdir()) == 0:
            #     print(f'removing dir {experiment_dir}')
            #     shutil.rmtree(experiment_dir)
            #
            # elif (max(int(d.name) for d in (experiment_dir / 'checkpoints').iterdir() if d.name.startswith('0')) < 10):
            #     print(f'removing dir {experiment_dir}')
            #     shutil.rmtree(experiment_dir)


def clean_early_checkpoints(parent_folder: Path):
    for experiment_dir in parent_folder.iterdir():
        checkpoints_dir = experiment_dir / 'checkpoints' / '0*'
        checkpoints = glob.glob(str(checkpoints_dir))
        checkpoint_epochs = sorted(
            [Path(checkpoint).stem for checkpoint in checkpoints])
        for checkpoint in checkpoints:
            if Path(checkpoint).stem != checkpoint_epochs[-1]:
                shutil.rmtree(checkpoint)
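
A hypothetical invocation; the argument is whatever directory holds the individual experiment run dirs:

# keeps only the newest zero-padded checkpoint of each experiment run
clean_early_checkpoints(Path('~/experiments').expanduser())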


if __name__ == '__main__':
    clean_database()
    clean_database_model_checkpoints()
    config_path_polymnist = get_config_path(dataset='polymnist')
    config_path_mimic = get_config_path(dataset='mimic')
    for config_path in [config_path_polymnist, config_path_mimic]:
        with open(config_path, 'rt') as json_file:
            config = json.load(json_file)

        clean_exp_dirs(config)
Example #14
    "initial_learning_rate": [0.001],
    "end_epoch": [1],
    "eval_freq": [1],
    "calc_nll": [False],
}
search_space_sylvester = {
    'method': ['mopoe'],
    'max_beta': [1.],
    'class_dim': [64],
    "num_mods": [1],
    "num_flows": [5],
    "initial_learning_rate": [0.0005],
    "end_epoch": [1],
    "eval_freq": [1],
    "calc_nll": [False],
}

if __name__ == '__main__':

    for grid in [search_spaces_amortized]:
        for sp in ParameterGrid(grid):
            # for _ in [1]:
            flags = parser.parse_args()
            flags_setup = FlagsSetup(get_config_path(dataset='polymnist', flags=flags))
            flags = flags_setup.setup(flags, additional_args=sp)
            with norby(f'Starting Experiment {flags.experiment_uid}.', f'Experiment {flags.experiment_uid} finished.'):
                mst = PolymnistExperiment(flags)
                mst.set_optimizer()
                trainer = PolymnistTrainer(mst)
                trainer.run_epochs()