Example #1
    def run(self):
        try:
            path = replace_standard_paths('{path_tmp}/tmp.run_config')
            self.interactive.to_json(path)
            Main.new_task(path).run()
        except Exception as e:
            LoggerManager().get_logger().error(str(e), exc_info=e)
            tkm.showwarning(message=str(e))
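The pattern above routes any failure into both the log (with a full traceback via exc_info) and a GUI warning box. A minimal self-contained sketch of the same idea, assuming tkm is an alias for tkinter.messagebox as in the snippet:

import logging
import tkinter.messagebox as tkm  # assumed alias, matching the snippet above

logger = logging.getLogger(__name__)

def run_safely(task):
    """Run a callable; log and surface any exception instead of crashing the GUI."""
    try:
        task()
    except Exception as e:
        logger.error(str(e), exc_info=e)  # full traceback goes to the log
        tkm.showwarning(message=str(e))   # short message goes to the user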
Example #2
    def test_deterministic_method(self):
        """
        make sure that using the same seed results in the same outcome when:
            - using deterministic cudnn settings
            - using a single worker for data loading
        note that this does not test:
            - the lightning trainer
            - multi gpu setups, ddp distribution
            - advanced augmentation strategies, e.g. mixup
        """
        for i, config in enumerate([
                super1_fairnas,
                super3,
                search_darts_config,
                retrain_darts_cifar_config,
        ]):
            for seed in range(2):
                args_changes = {
                    "{cls_task}.seed": seed,
                    "{cls_task}.is_deterministic": True,
                    "{cls_task}.is_test_run": True,
                    "{cls_task}.save_del_old": True,
                    "{cls_device}.num_devices": 1,
                    "{cls_device}.use_cudnn_benchmark": False,
                    "cls_trainer": "SimpleTrainer",
                    "{cls_trainer}.max_epochs": 1,
                    "cls_data": "Cifar10Data",
                    "{cls_data}.fake": False,
                    "{cls_data}.batch_size_train": 2,
                    "{cls_data}.batch_size_test": -1,
                    "{cls_data}.dir": "{path_data}/cifar_data/",
                    "{cls_data}.download": True,
                    "{cls_data}.num_workers": 0,
                    "{cls_schedulers#0}.warmup_epochs": 0,
                }
                exp1 = Main.new_task(config,
                                     args_changes=args_changes.copy()).run()
                name = exp1.get_method().__class__.__name__
                data = exp1.get_method().data_set.sample_random_data(
                    batch_size=4).cuda()
                outputs1 = [
                    o.clone().detach() for o in exp1.get_method()(data)
                ]
                del exp1

                exp2 = Main.new_task(config, args_changes=args_changes).run()
                outputs2 = [
                    o.clone().detach() for o in exp2.get_method()(data)
                ]
                del exp2

                for o1, o2 in zip(outputs1, outputs2):
                    self._assert_same_tensors(
                        'i=%d seed=%d method=%s' % (i, seed, name), o1, o2)
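The reproducibility this test asserts can be demonstrated in plain PyTorch, outside the framework; a minimal sketch of what the config flags above correspond to (fixed seed, deterministic cudnn, no benchmark autotuning):

import torch

def seeded_forward(seed: int) -> torch.Tensor:
    torch.backends.cudnn.deterministic = True  # pick deterministic cudnn kernels
    torch.backends.cudnn.benchmark = False     # disable nondeterministic autotuning
    torch.manual_seed(seed)                    # same seed -> same init and sampling
    net = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.ReLU(), torch.nn.Linear(8, 2))
    data = torch.randn(4, 8)
    with torch.no_grad():
        return net(data)

# two independent runs with the same seed give bit-identical outputs
assert torch.equal(seeded_forward(0), seeded_forward(0))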
Example #3
    def test_search_param_updates(self):
        """
        set gradients of network/architecture weights to zero and ensure that they are not updated, while the others are

        expecting two arc weights:
            28x8, with gradients, the actual weights
            28x8, not requiring gradients, only masks
        """
        for mask_idx in range(2):
            exp1 = Main.new_task(
                search_darts_config,
                args_changes={
                    "test_mask_idx": mask_idx,
                    "cls_method": "TestMaskGradientsSearchMethod",
                    "{cls_task}.is_test_run": True,
                    "{cls_trainer}.max_epochs": 1,
                    "cls_data": "Cifar10Data",
                    "{cls_data}.fake": True,
                    "{cls_data}.download": False,
                    "{cls_data}.batch_size_train": 2,
                    "{cls_data}.batch_size_test": -1,
                    "{StackedCellsNetworkBody}.cell_order": "n, r, n, r, n",
                    "cls_optimizers": "DebugOptimizer, DebugOptimizer",
                    # "cls_optimizers": "SGDOptimizer, SGDOptimizer",
                    "{cls_optimizers#0}.weight_decay": 0.0,
                    "{cls_optimizers#1}.weight_decay": 0.0,
                },
                raise_unparsed=False)

            # get initial weights, make copies
            optimizers, _ = exp1.get_method().configure_optimizers()
            assert len(optimizers) == 1, "expecting only one multi optimizer"
            weights = [
                opt.param_groups[0]['params'][0]
                for opt in optimizers[0].optimizers
            ]
            weight_copies = [w.clone().detach().cpu() for w in weights]
            s = ['Network', 'Architecture']

            for i, w in enumerate(weights):
                print('%s sample-weight shape:' % s[i], w.shape)

            # run, thus change weights
            exp1.run()

            for i, (w, w_copy) in enumerate(zip(weights, weight_copies)):
                diff = (w.cpu() - w_copy).abs().sum().item()
                if i == mask_idx:
                    if diff > 0.00001:
                        print(w[0:3, 0:3])
                        print(w_copy[0:3, 0:3])
                        assert False, '%s gradients were masked but weights changed anyway; diff: %s' % (
                            s[i], diff)
                else:
                    if diff < 0.00001:
                        print(w[0:3, 0:3])
                        print(w_copy[0:3, 0:3])
                        assert False, '%s gradients were not masked but weights did not change; diff: %s' % (
                            s[i], diff)
            del exp1
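What the TestMaskGradientsSearchMethod presumably does can be shown without the framework: with weight decay at 0.0 (as set above), a parameter whose gradient is zeroed before the optimizer step does not move, while the others do. A minimal sketch:

import torch

torch.manual_seed(0)
w_masked = torch.nn.Parameter(torch.randn(3, 3))
w_free = torch.nn.Parameter(torch.randn(3, 3))
opt = torch.optim.SGD([w_masked, w_free], lr=0.1)  # weight_decay defaults to 0.0

loss = (w_masked ** 2).sum() + (w_free ** 2).sum()
loss.backward()
w_masked.grad.zero_()  # mask: erase the gradient of one tensor

before_masked = w_masked.clone().detach()
before_free = w_free.clone().detach()
opt.step()

assert torch.equal(w_masked, before_masked)  # masked weights did not change
assert not torch.equal(w_free, before_free)  # unmasked weights were updated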
Example #4
def assert_stats_match(name,
                       task_cfg,
                       cfg: dict,
                       num_params=None,
                       num_macs=None):
    cfg_dir = replace_standard_paths('{path_tmp}/tests/cfgs/')
    cfg_path = Builder.save_config(cfg, cfg_dir, name)
    exp = Main.new_task(
        task_cfg,
        args_changes={
            '{cls_data}.fake': True,
            '{cls_data}.batch_size_train': 2,
            '{cls_data}.batch_size_test': -1,
            '{cls_task}.is_test_run': True,
            '{cls_task}.save_dir': '{path_tmp}/tests/workdir/',
            "{cls_network}.config_path": cfg_path,
            "{cls_trainer}.ema_decay": -1,
            # 'ClassificationHead' is necessary for the DARTS search space, it disables the aux heads
            'cls_network_heads': 'ClassificationHead',
        },
        raise_unparsed=False)
    net = exp.get_method().get_network()
    macs = exp.get_method().profile_macs()
    net.eval()
    # print(net)
    cp = count_parameters(net)
    if num_params is not None:
        assert cp == num_params, 'Got unexpected num params for %s: %d, expected %d, diff: %d'\
                                 % (name, cp, num_params, abs(cp - num_params))
    if num_macs is not None:
        assert macs == num_macs, 'Got unexpected num macs for %s: %d, expected %d, diff: %d'\
                                 % (name, macs, num_macs, abs(macs - num_macs))
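count_parameters is a repo helper; a sketch of the usual PyTorch one-liner it presumably wraps:

import torch.nn as nn

def count_parameters(net: nn.Module) -> int:
    """Total number of trainable parameter elements in a network."""
    return sum(p.numel() for p in net.parameters() if p.requires_grad)

# e.g. a single 10->10 linear layer: 10*10 weights + 10 biases = 110
assert count_parameters(nn.Linear(10, 10)) == 110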
Example #5
def generate_from_name(name: str, save=True, verbose=True):
    genotype, compact = compact_from_name(name, verbose=verbose)
    run_configs = '{path_conf_tasks}/d1_dartsv1.run_config, {path_conf_net_search}/darts.run_config'
    # create weight sharing cell model
    changes = {
        'cls_data': 'Cifar10Data',
        '{cls_data}.fake': True,
        '{cls_task}.save_del_old': False,
        '{cls_network_body}.cell_order': 'n, r',
        '{cls_network_body}.features_first_cell': 36 * 4,
        '{cls_network_stem}.features': 36 * 3,
        'cls_network_cells_primitives': "%s, %s" % (compact.get('primitives'), compact.get('primitives')),
    }
    task = Main.new_task(run_configs, args_changes=changes)
    net = task.get_method().get_network()
    args = task.args

    wss = StrategyManager().get_strategies()
    assert len(wss) == 1
    ws = wss[list(wss.keys())[0]]

    # fix arc, all block inputs use different weights
    # go through all weights in the search cell
    for n, w in ws.named_parameters_single():
        # figure out cell type ("normal", "reduce"), block index, and if it's the first, second, ... op of that block
        c_type, block_idx, num_inputs, num_idx = n.split('/')[-4:]
        block_idx = int(block_idx.split('-')[-1])
        num_idx = int(num_idx.split('-')[-1])
        # set all path weights to zero
        w.data.zero_()
        # go through the cell description of the genotype, if input and op number match, set the weight to be higher
        for op_idx, from_idx in compact.get(c_type)[block_idx]:
            if num_idx == from_idx:
                w[op_idx] = 1
    ws.forward()

    # saving config now will only use the highest weighted connections, since we have a search network
    cfg = net.config(finalize=True, num_block_ops=2)
    if save:
        path = Builder.save_config(cfg, get_net_config_dir(genotype.source),
                                   name)
        print('Saved config: %s' % path)
    return net, cfg, args
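The zero-out-then-set-to-one loop makes the later finalization unambiguous: with exactly one path weight at 1 per decision, the argmax that net.config(finalize=True, ...) presumably takes always selects the genotype's op. The idea in isolation (toy tensor, not the uninas StrategyManager API):

import torch

path_weights = torch.zeros(8)  # 8 candidate ops on one edge, as in the search cell
chosen_op = 3                  # taken from the genotype's compact description
path_weights[chosen_op] = 1.0

# finalizing keeps only the highest-weighted op on each edge
assert int(path_weights.argmax()) == chosen_op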
Example #6
def visualize_config(config: dict, save_path: str):
    save_path = replace_standard_paths(save_path)
    cfg_path = Builder.save_config(config, replace_standard_paths('{path_tmp}/viz/'), 'viz')
    exp = Main.new_task(run_config, args_changes={
        '{cls_data}.fake': True,
        '{cls_data}.batch_size_train': 4,
        '{cls_task}.is_test_run': True,
        '{cls_task}.save_dir': '{path_tmp}/viz/task/',
        '{cls_task}.save_del_old': True,
        "{cls_network}.config_path": cfg_path,
    })
    net = exp.get_method().get_network()
    vt = VizTree(net)
    vt.print()
    vt.plot(save_path + 'net', add_subgraphs=True)
    print('Saved cell viz to %s' % save_path)
Example #7
def visualize_config(config: dict, save_path: str):
    save_path = replace_standard_paths(save_path)
    cfg_path = Builder.save_config(config, replace_standard_paths('{path_tmp}/viz/'), 'viz')
    exp = Main.new_task(run_config, args_changes={
        '{cls_data}.fake': True,
        '{cls_data}.batch_size_train': 2,
        '{cls_task}.is_test_run': True,
        '{cls_task}.save_dir': '{path_tmp}/viz/task/',
        '{cls_task}.save_del_old': True,
        "{cls_task}.note": "viz",
        "{cls_network}.config_path": cfg_path,
    })
    net = exp.get_method().get_network()
    for s in ['n', 'r']:
        for cell in net.get_cells():
            if cell.name.startswith(s):
                visualize_cell(cell, save_path, s)
                break
    print('Saved cell viz to %s' % save_path)
Example #8
    def generate(self,
                 save_dir: str,
                 name: str,
                 verbose=True) -> (nn.Module, dict, Namespace):
        run_configs = '{path_conf_tasks}/s1_random.run_config, {path_conf_net_search}/%s.run_config' % self.search_net
        task = Main.new_task(run_configs,
                             args_changes={
                                 '{cls_data}.fake': True,
                                 '{cls_task}.save_del_old': False,
                                 '{cls_task}.save_dir': '{path_tmp}/generate/',
                                 '{cls_trainer}.ema_decay': -1,
                             })
        net = task.get_method().get_network()
        args = task.args

        # fix arc
        net.forward_strategy(fixed_arc=self.gene)

        cfg = self.save(net, save_dir, name, verbose)
        return net, cfg, args
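A gene here is presumably a flat sequence of operation indices, one per architecture decision; fixing the architecture means every forward pass takes exactly those paths. A toy illustration (hypothetical FixedChoice module, not the uninas strategy API):

import torch
import torch.nn as nn

class FixedChoice(nn.Module):
    """Forward through exactly one of several candidate ops, fixed by a gene index."""

    def __init__(self, ops: list, op_idx: int):
        super().__init__()
        self.ops = nn.ModuleList(ops)
        self.op_idx = op_idx

    def forward(self, x):
        return self.ops[self.op_idx](x)

# gene entry 1: always use the 3x3 conv on this edge
choice = FixedChoice([nn.Identity(), nn.Conv2d(4, 4, 3, padding=1)], op_idx=1)
out = choice(torch.randn(2, 4, 8, 8))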
Example #9
    # "{cls_initializers#0}.strict": True,

    "cls_criterion": "CrossEntropyCriterion",

    "cls_optimizers": "SGDOptimizer",
    "{cls_optimizers#0}.lr": 0.00,
    "{cls_optimizers#0}.momentum": 0.9,
    "{cls_optimizers#0}.accumulate_batches": 2,

    "cls_schedulers": "CosineScheduler",
    "{cls_schedulers#0}.warmup_epochs": 2,
    "{cls_schedulers#0}.warmup_lr": 0.01,

    "cls_regularizers": "DropOutRegularizer",
    # "cls_regularizers": "DropOutRegularizer, DropPathRegularizer",
    "{cls_regularizers#0}.prob": 0.0,
    # "{cls_regularizers#1}.max_prob": 0.1,

    # "{cls_method}.amp_enabled": False,
    # "{cls_optimizers#0}.weight_decay": 4e-3,
    # "{cls_optimizers#0}.weight_decay_filter": True,
}

if __name__ == "__main__":
    # ignore the command line, use "args" instead
    task = Main.new_task([], args_changes=args)
    # print(task.get_method().get_network())
    task.load()
    # task.load('{path_tmp}/s3_2/')
    task.run()
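The scheduler settings above describe two warmup epochs starting from lr 0.01, followed by cosine decay. A sketch of such a schedule in plain Python (the exact warmup shape of CosineScheduler is an assumption):

import math

def lr_at_epoch(epoch: int, max_epochs: int, base_lr: float,
                warmup_epochs: int = 2, warmup_lr: float = 0.01) -> float:
    """Linear warmup from warmup_lr to base_lr, then cosine decay towards zero."""
    if epoch < warmup_epochs:
        t = (epoch + 1) / warmup_epochs
        return warmup_lr + t * (base_lr - warmup_lr)
    t = (epoch - warmup_epochs) / max(1, max_epochs - warmup_epochs)
    return 0.5 * base_lr * (1.0 + math.cos(math.pi * t))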
Example #10
config_files = "{path_conf_tasks}/d1_dartsv1.run_config, {path_conf_net_search}/bench201.run_config"
# config_files = "{path_conf_tasks}/d1_asap.run_config, {path_conf_net_search}/bench201.run_config"
# config_files = "{path_conf_tasks}/d1_gdas.run_config, {path_conf_net_search}/bench201.run_config"
# config_files = "{path_conf_tasks}/d1_mdenas.run_config, {path_conf_net_search}/bench201.run_config"

changes = {
    "{cls_task}.is_test_run": True,
    "{cls_task}.save_dir": "{path_tmp}/run_bench_d1/",
    "{cls_task}.save_del_old": True,

    "cls_benchmark": "MiniNASTabularBenchmark",
    "{cls_benchmark}.path": "{path_data}/nats_bench_1.1_mini.pt",
    "{cls_benchmark}.default_data_set": "cifar100",
    "{cls_benchmark}.default_result_type": "test",

    "{cls_trainer}.max_epochs": 4,

    # "cls_data": "Cifar10Data",
    "{cls_data}.fake": True,
    "{cls_data}.dir": "{path_data}/ImageNet_ILSVRC2012/",
    "{cls_data}.batch_size_train": 2,
    "{cls_data}.batch_size_test": -1,
}


if __name__ == "__main__":
    task = Main.new_task(config_files, args_changes=changes)
    task.load()
    task.run()
Example #11
    def test_model_save_load(self):
        """
        make sure that saving+loading for methods/models works correctly, so that we get the same outputs after loading
        """

        for i, (config, trainer, ds, change_net, fix_topology) in enumerate([
            (dna1_config, 'SimpleTrainer', 'Imagenet1000Data', False, True),
            (super1_fairnas, 'SimpleTrainer', 'Imagenet1000Data', False, True),
            (search_darts_config, 'SimpleTrainer', 'Cifar10Data', True, False),
            (retrain_darts_cifar_config, 'SimpleTrainer', 'Cifar10Data', True, False),
            # (search_darts_config, 'LightningTrainer'),
            # (retrain_darts_cifar_config, 'LightningTrainer'),
        ]):
            save_dir = "{path_tmp}/tests/%d/"
            arg_changes = {
                "cls_data": ds,
                "{cls_data}.fake": True,
                "{cls_data}.batch_size_train": 4,
                "{cls_data}.batch_size_test": 4,
                "cls_trainer": trainer,
                "{cls_trainer}.max_epochs": 2,
                "{cls_task}.seed": 0,
                "{cls_task}.is_test_run": True,
                "{cls_task}.save_dir": save_dir % 1,
                "{cls_task}.save_del_old": True,
                "{cls_schedulers#0}.warmup_epochs": 0,
            }
            if change_net:
                arg_changes.update({
                    "{cls_network_body}.features_first_cell": 8,
                    "{cls_network_body}.cell_order": "n, r, n, r, n",
                    "{cls_network_stem}.features": 4,
                })

            print(config)
            exp1 = Main.new_task(config, args_changes=arg_changes).run()
            data = exp1.get_method().get_data_set().sample_random_data(
                batch_size=4).cuda()
            net = exp1.get_method().get_network()
            if fix_topology and isinstance(net, SearchUninasNetwork):
                net.set_forward_strategy(False)
                net.get_strategy_manager().forward_const(0)
            with torch.no_grad():
                outputs1 = exp1.get_method()(data)

            arg_changes["{cls_task}.save_dir"] = save_dir % 2
            arg_changes["{cls_task}.seed"] += 1
            exp2 = Main.new_task(config, args_changes=arg_changes).run().load(
                save_dir % 1)
            net = exp2.get_method().get_network()
            if fix_topology and isinstance(net, SearchUninasNetwork):
                net.set_forward_strategy(False)
                net.get_strategy_manager().forward_const(0)
            with torch.no_grad():
                outputs2 = exp2.get_method()(data)

            for o1, o2 in zip(outputs1, outputs2):
                self._assert_same_tensors(
                    'i=%d method=%s' %
                    (i, exp1.get_method().__class__.__name__), o1, o2)
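Stripped of the framework, the property this test checks is a state_dict round trip: a freshly built model that loads saved weights must produce bit-identical outputs. A minimal sketch:

import torch
import torch.nn as nn

net1 = nn.Linear(8, 2)
torch.save(net1.state_dict(), '/tmp/net.pt')

net2 = nn.Linear(8, 2)  # fresh, differently initialized weights
net2.load_state_dict(torch.load('/tmp/net.pt'))

data = torch.randn(4, 8)
with torch.no_grad():
    assert torch.equal(net1(data), net2(data))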