Example #1
    def __init__(self, hp: HyperParams):
        set_random_state(hp.random_seed)
        self.hp = hp
        self.device = get_device()
        self.net, self.loaders, self.opt_inner = self.configure()
        self.criterion = torch.nn.CrossEntropyLoss()
        lr_schedule = LinearDecayLR(hp.lr_outer, hp.steps_outer)
        self.opt_outer = ReptileSGD(self.net, lr_schedule)
        self.samples = {
            s: next(iter(loader))
            for s, loader in self.loaders.items()
        }
        self.support = (torch.empty(0), torch.empty(0))
        self.save_path = f"{self.hp.root}/{self.hp.to_string()}.pt"
        print(dict(save_path=self.save_path))
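Every example on this page seeds the RNGs through set_random_state before building the model and data loaders: Example #1 passes hp.random_seed, while Examples #2 and #3 call it without an argument and keep the returned generator as self.rng. The helper itself is not reproduced here; a minimal sketch of what such a function typically does, assuming it seeds Python, NumPy, and PyTorch and returns a NumPy generator, might look like the following (the projects' actual implementations may differ):

import random

import numpy as np
import torch


def set_random_state(seed: int = 42) -> np.random.Generator:
    # Assumed helper: seed every RNG the training code touches so runs repeat.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)
    # Returning a generator matches the `self.rng = set_random_state()` usage below.
    return np.random.default_rng(seed)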
Example #2
    def __init__(
        self,
        hp: HyperParams,
        loaders: Dict[str, MetaLoader],
        net: torch.nn.Module,
        use_gpu=True,
    ):
        self.hp = hp
        self.loaders = loaders

        self.device = get_device(use_gpu)
        self.rng = set_random_state()
        self.net = net.to(self.device)
        self.criterion = torch.nn.CrossEntropyLoss()
        self.opt_inner = torch.optim.SGD(self.net.parameters(), lr=hp.lr_inner)
        lr_schedule = LinearDecayLR(hp.lr_outer, hp.steps_outer)
        self.opt_outer = ReptileSGD(self.net, lr_schedule)
        self.batch_val = next(iter(self.loaders[Splits.val]))
Example #3
    def __init__(
        self,
        hparams: HyperParams,
        loaders: Dict[str, torchmeta.utils.data.BatchMetaDataLoader],
        net: torch.nn.Module,
    ):
        self.hparams = hparams
        self.loaders = loaders

        self.device = get_device()
        self.rng = set_random_state()
        self.net = net.to(self.device)
        self.criterion = torch.nn.CrossEntropyLoss()
        self.opt_inner = torch.optim.SGD(self.net.parameters(),
                                         lr=hparams.lr_inner)
        lr_schedule = LinearDecayLR(hparams.lr_outer, hparams.steps_outer)
        self.opt_outer = ReptileSGD(self.net,
                                    lr_schedule,
                                    num_accum=hparams.bs_outer)
        self.batch_val = next(iter(self.loaders[Splits.val]))
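Examples #1 to #3 wrap the network in a ReptileSGD outer optimizer driven by a LinearDecayLR schedule, with Example #3 additionally averaging over bs_outer tasks via num_accum. Those classes are not shown on this page; the sketch below only illustrates the idea behind the names, i.e. an outer learning rate that decays linearly to zero and a Reptile-style outer step that moves the stored initial weights toward the weights reached after inner-loop adaptation. The signatures mirror the constructor calls above, but the bodies are assumptions rather than the projects' actual code:

import torch


class LinearDecayLR:
    # Assumed schedule: decay linearly from lr_init to 0 over num_steps outer steps.
    def __init__(self, lr_init: float, num_steps: int):
        self.lr_init = lr_init
        self.num_steps = num_steps

    def get_lr(self, step: int) -> float:
        return self.lr_init * (1.0 - min(step, self.num_steps) / self.num_steps)


class ReptileSGD:
    # Assumed Reptile outer optimizer: after each task's inner-loop SGD, store the
    # parameter change and reset the network; step() averages the stored changes
    # over the accumulated tasks and moves the initial weights in that direction.
    def __init__(self, net: torch.nn.Module, lr_schedule: LinearDecayLR, num_accum: int = 1):
        self.net = net
        self.lr_schedule = lr_schedule
        self.num_accum = num_accum
        self.step_count = 0
        self.num_stored = 0
        self.weights_init = {n: p.detach().clone() for n, p in net.named_parameters()}
        self.delta_sum = {n: torch.zeros_like(w) for n, w in self.weights_init.items()}

    def store_grad(self):
        # Accumulate (adapted - initial), then reset the net for the next task.
        with torch.no_grad():
            for n, p in self.net.named_parameters():
                self.delta_sum[n] += p.detach() - self.weights_init[n]
                p.copy_(self.weights_init[n])
        self.num_stored += 1

    def step(self):
        # Outer update: move the initial weights toward the average adapted weights.
        lr = self.lr_schedule.get_lr(self.step_count)
        with torch.no_grad():
            for n, p in self.net.named_parameters():
                self.weights_init[n] += lr * self.delta_sum[n] / max(self.num_stored, 1)
                p.copy_(self.weights_init[n])
                self.delta_sum[n].zero_()
        self.num_stored = 0
        self.step_count += 1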
Example #4
File: main.py Project: lt610/DAGNN
    parser.add_argument("--hid-dim",
                        type=int,
                        default=64,
                        help='Hidden layer dimensionalities.')
    parser.add_argument('--batchnorm',
                        action='store_true',
                        default=False,
                        help="batchnorm")
    parser.add_argument('--dropout', type=float, default=0.8, help='dropout')
    parser.add_argument('--drop_bef',
                        action='store_true',
                        default=True,
                        help='Location of dropout')
    args = parser.parse_args()
    print(args)

    acc_lists = []
    random_seeds = generate_random_seeds(seed=1222, nums=args.runs)

    for run in range(args.runs):
        set_random_state(random_seeds[run])
        acc_lists.append(main(args))

    acc_lists = np.array(acc_lists)

    mean = np.around(np.mean(acc_lists, axis=0), decimals=4)
    std = np.around(np.std(acc_lists, axis=0), decimals=4)

    print('Total acc: ', acc_lists)
    print('mean', mean)
    print('std', std)
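Example #4 repeats the experiment args.runs times, drawing one seed per run from generate_random_seeds so that every run is independent while the whole sweep stays reproducible from the single master seed 1222, and then reports the mean and standard deviation of the collected accuracies. A plausible sketch of such a seed helper, assuming only the name and signature visible in the call above:

import numpy as np


def generate_random_seeds(seed: int, nums: int) -> list:
    # Assumed helper: derive `nums` per-run seeds from one master seed.
    rng = np.random.default_rng(seed)
    return rng.integers(0, 2**31 - 1, size=nums).tolist()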