Example #1
    },
    {
        'network': OGN,
        'net_cfg': {
            'k': 256
        },
        'lr': 1e-2
    },
]

if __name__ == '__main__':
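    # Start from makeTrainer's keyword defaults, then override the entries being swept below.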
    config_spec = copy.deepcopy(makeTrainer.__kwdefaults__)
    config_spec.update({
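        # num_epochs is computed from each sampled n_train; the n_train list enumerates the training-set sizes to sweep.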
        'num_epochs': (lambda cfg: int(np.sqrt(1e7 / cfg['n_train']))),
        'n_train':
        [10, 25, 50, 100, 400, 1000, 3000, 10000, 30000, 100000 - 4000],
    })
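    # argupdated_config lets entries of the spec be overridden from the command line.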
    config_spec = argupdated_config(config_spec)
    name = 'data_scaling_dynamics_final'  #config_spec.pop('study_name')
    num_repeats = 3  #config_spec.pop('num_repeats')
    thestudy = Study(Trial, {},
                     study_name=name,
                     base_log_dir=config_spec['trainer_config'].get(
                         'log_dir', None))
    for cfg in best_hypers:
        the_config = copy.deepcopy(config_spec)
        the_config.update(cfg)
        thestudy.run(num_trials=-1 * num_repeats,
                     new_config_spec=the_config,
                     ordered=True)
    print(thestudy.results_df())
Example #2
        #'unlab_loader_config':{'batch_size':2000},
        'net_config': {'k': 256}, 'trainer': Classifier,
        'trainer_config': {'log_dir': os.path.expanduser('~/tb-experiments/UCI/t3layer_baseline/'),
                           'log_args': {'minPeriod': .01, 'timeFrac': 3/10}},  #[1,.1,.3,3],}#'advEps':[10,3,1,.3]}
        }
if __name__ == '__main__':
    # thestudy = Study(PI_trial,uci_pi_spec2,study_name='uci_baseline2234_')
    # thestudy.run(num_trials=3,ordered=False)
    # #print(thestudy.covariates())
    # covars = thestudy.covariates()
    # covars['test_Acc'] = thestudy.outcomes['test_Acc'].values
    # covars['dev_Acc'] = thestudy.outcomes['dev_Acc'].values
    # print(covars.drop(['log_suffix','saved_at'],axis=1))

    # PI model baselines for AG-NEWS w/ best hyperparameters
    text_pi_cfg = {'dataset':AG_News,'num_epochs':50,'trainer':PiModel,'trainer_config':{'cons_weight':30},'opt_config':{'lr':1e-3},
                    'loader_config': {'amnt_labeled':200+5000,'lab_BS':200}}
    text_classifier_cfg = {'dataset':AG_News,'num_epochs':500,'trainer':Classifier,'opt_config':{'lr':1e-3},
                    'loader_config': {'amnt_labeled':200+5000,'lab_BS':200}}
    y_text_pi_cfg = {'dataset':YAHOO,'num_epochs':50,'trainer':PiModel,'trainer_config':{'cons_weight':[30]},'opt_config':{'lr':[1e-4]},
                    'loader_config': {'amnt_labeled':800+5000,'lab_BS':800}}
    y_text_classifier_cfg = {'dataset':YAHOO,'num_epochs':500,'trainer':Classifier,'opt_config':{'lr':1e-3},
                    'loader_config': {'amnt_labeled':800+5000,'lab_BS':800}}
    # Searched from
    # text_pi_cfg = {'num_epochs':50,'trainer':PiModel,'trainer_config':{'cons_weight':[10,30]},'opt_config':{'lr':[1e-3,3e-4,3e-3]},
    #                   'loader_config': {'amnt_labeled':200+5000,'lab_BS':200}}

    textstudy = Study(PI_trial,y_text_pi_cfg,study_name='Agnews')
    textstudy.run(3)
    print(textstudy.covariates())
    print(textstudy.outcomes)
Example #3
import lie_conv.moleculeTrainer as moleculeTrainer
import lie_conv.lieGroups as lieGroups
from lie_conv.lieGroups import T,Trivial,SE3,SO3
import lie_conv.lieConv as lieConv
from lie_conv.lieConv import ImgLieResnet
from lie_conv.datasets import MnistRotDataset
from examples.train_molec import makeTrainer,Trial
from oil.tuning.study import Study
from oil.tuning.args import argupdated_config
import copy

def trial_name(cfg):
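    """Build a readable log suffix from the fill, neighborhood size, group, and learning rate."""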
    ncfg = cfg['net_config']
    return f"molec_f{ncfg['fill']}_n{ncfg['nbhd']}_{ncfg['group']}_{cfg['lr']}"


def bigG(cfg):
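    """Return True when the configured group is one of the larger lift groups (SE3/SO3)."""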
    return isinstance(cfg['net_config']['group'],(SE3,SO3))

if __name__ == '__main__':
    config_spec = copy.deepcopy(makeTrainer.__kwdefaults__)
    config_spec.update({
        'num_epochs':500,
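        # Larger groups (SE3/SO3) use a smaller fill, neighborhood, and batch size, more lift samples, and recentering.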
        'net_config': {'fill': lambda cfg: (1., 1/2)[bigG(cfg)], 'nbhd': lambda cfg: (100, 25)[bigG(cfg)],
                       'group': T(3), 'liftsamples': lambda cfg: (1, 4)[bigG(cfg)]},
        'recenter': lambda cfg: bigG(cfg), 'lr': 3e-3, 'bs': lambda cfg: (100, 75)[bigG(cfg)],
        'task': ['alpha', 'gap', 'homo', 'lumo', 'mu', 'Cv', 'G', 'H', 'r2', 'U', 'U0', 'zpve'],
        'trainer_config': {'log_dir': 'molec_all_tasks4', 'log_suffix': lambda cfg: trial_name(cfg)},
    })
    config_spec = argupdated_config(config_spec,namespace=(moleculeTrainer,lieGroups))
    thestudy = Study(Trial,config_spec,study_name='molec_all_tasks4')
    thestudy.run(num_trials=-1,ordered=True)
    print(thestudy.results_df())
Example #4
File: tune_img.py  Project: nishr/LieConv
from oil.utils.parallel import try_multigpu_parallelize
from oil.model_trainers.classifier import Classifier
from functools import partial
from torch.optim import Adam
from oil.tuning.args import argupdated_config
import copy
import lie_conv.lieGroups as lieGroups
import lie_conv.lieConv as lieConv
from lie_conv.lieConv import ImgLieResnet
from lie_conv.datasets import MnistRotDataset
from examples.train_img import makeTrainer
from oil.tuning.study import Study, train_trial

if __name__ == '__main__':
    Trial = train_trial(makeTrainer)
    thestudy = Study(Trial, {}, study_name='tune_se2_img_hypers_alpha')
    config_spec = copy.deepcopy(makeTrainer.__kwdefaults__)
    config_spec.update({
        'num_epochs': 300,
        'net_config': {
            'k': 128,
            'total_ds': .1,
            'fill': 1 / 10,
            'nbhd': 25,
            'liftsamples': 2,
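            # Sweep over the SE(2) alpha parameter (cf. the study name).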
            'group':
            [lieGroups.SE2(a) for a in [0., .1, .15, .2, .25, .3, .5]]
        },
        'split': {
            'train': 10000,
            'val': 2000
Example #5
    'opt_config': {
        'lr': 3e-4
    },
    'dataset': MINIBOONE,  #[MINIBOONE,HEPMASS,AG_News],
    'trainer_config': {
        'log_dir':
        os.path.expanduser('~/tb-experiments/UCI/flowgmm/miniboone/'),
        'unlab_weight': 1.
    },
    'loader_config': {
        'amnt_labeled': 20 + 5000,
        'amnt_dev': 5000,
        'lab_BS': 20
    },
    'net_config': {
        'k': 256,
        'coupling_layers': 7,
        'nperlayer': 1
    }
}

#trial(one_flowgmm_cfg)
thestudy = Study(trial, text_flowgmm_cfg, study_name='text_hypers')
thestudy.run(3)
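# Attach the test/dev accuracy outcomes to the covariates table for one summary frame.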
covars = thestudy.covariates()
covars['test_Acc'] = thestudy.outcomes['test_Acc'].values
covars['dev_Acc'] = thestudy.outcomes['dev_Acc'].values
print(covars.drop(['log_suffix', 'saved_at'], axis=1))
# print(thestudy.covariates())
# print(thestudy.outcomes)
Example #6
Trial = train_trial(makeTrainer)
#r = lambda *options: np.random.choice(options)
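# Per-network hyperparameter grids; list-valued entries are the candidate values searched over.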
hyper_choices = {
    HLieResNet: {
        'net_cfg': {
            'k': 384,
            'num_layers': [2, 4],
            'group': SO2()
        },
        'lr': [1e-3, 3e-4]
    },
    #VOGN: {'net_cfg':{'k':512},'lr':[3e-3,1e-2]},
    # HOGN: {'net_cfg':{'k':256},'lr':{1e-2,3e-3,1e-4}},
    # OGN: {'net_cfg':{'k':256},'lr':{1e-2,3e-3,1e-4}},
    # FCHamNet: {'net_cfg':{'k':256,'num_layers':4},'lr':[1e-3,3e-3,1e-2]},
    #RawDynamicsNet: {'net_cfg':{'k':256},'lr':[1e-3,3e-3,1e-2]},
}

if __name__ == '__main__':
    thestudy = Study(Trial, {}, study_name='tune_dynamics_hypersbaselines')
    for network, net_config_spec in hyper_choices.items():
        config_spec = copy.deepcopy(makeTrainer.__kwdefaults__)
        config_spec.update({
            'network': network,
            'num_epochs': 50,
            'n_train': 3000,
        })
        config_spec.update(net_config_spec)
        thestudy.run(num_trials=-1, new_config_spec=config_spec, ordered=True)
    print(thestudy.results_df())
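
All of the examples above rely on the same config-spec convention: list-valued entries enumerate the options a Study searches over, and callable entries (lambdas of cfg) are resolved against the concrete config of each trial. The snippet below is a minimal standalone sketch of that convention, not the oil implementation; expand_spec and resolve are hypothetical helper names used only for illustration.

import itertools

def expand_spec(spec):
    """Yield one concrete config per combination of the list-valued options in a flat spec."""
    keys = list(spec)
    options = [v if isinstance(v, list) else [v] for v in spec.values()]
    for combo in itertools.product(*options):
        yield dict(zip(keys, combo))

def resolve(cfg):
    """Replace callable values (e.g. lambdas of cfg) with their computed results."""
    return {k: (v(cfg) if callable(v) else v) for k, v in cfg.items()}

if __name__ == '__main__':
    spec = {'n_train': [100, 1000], 'lr': 3e-3,
            'num_epochs': lambda cfg: int((1e7 / cfg['n_train']) ** 0.5)}
    for cfg in expand_spec(spec):
        print(resolve(cfg))  # e.g. {'n_train': 100, 'lr': 0.003, 'num_epochs': 316}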