Example #1
    def __init__(self, device=None, learning_rate=1e-3, act=F.relu, T=3):
        # Settings
        self.device = device
        self.act = act
        self.learning_rate = learning_rate
        self.T = T
        self.t = 0

        # Loss
        self.recon_loss = ReconstructionLoss()

        # Model
        from meta_st.cifar10.cnn_model_001 import Model
        self.model = Model(device, act)
        if device is not None:
            self.model.to_gpu(device)
        self.model_params = OrderedDict(self.model.namedparams())

        # Optimizer
        self.setup_meta_learners()
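
The constructor above leans on two Chainer idioms: an optional to_gpu() transfer and collecting namedparams() into an OrderedDict keyed by parameter path. Below is a minimal, self-contained sketch of that pattern, assuming Chainer is installed and using a hypothetical TinyModel in place of the project's Model from meta_st.cifar10.cnn_model_001 (whose definition is not shown here):

from collections import OrderedDict

import chainer
import chainer.functions as F
import chainer.links as L


class TinyModel(chainer.Chain):
    """Hypothetical stand-in for the project's Model: two linear layers."""

    def __init__(self, act=F.relu):
        super(TinyModel, self).__init__()
        self.act = act
        with self.init_scope():
            self.l0 = L.Linear(None, 32)
            self.l1 = L.Linear(None, 10)

    def __call__(self, x):
        return self.l1(self.act(self.l0(x)))


device = None  # e.g. 0 for the first GPU
model = TinyModel()
if device is not None:
    model.to_gpu(device)

# namedparams() yields (path, Parameter) pairs such as ('/l0/W', ...);
# an OrderedDict keeps a stable, name-addressable view of the weights.
model_params = OrderedDict(model.namedparams())
print(list(model_params.keys()))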
Example #2
def test_forward():
    device = None
    act = F.leaky_relu
    model = Model(device, act)
    model_params = OrderedDict(model.namedparams())

    x_data = np.random.rand(4, 3, 32, 32).astype(np.float32)
    y_data = np.random.randint(0, 10, 4).astype(np.int32)
    x = Variable(x_data)
    y = Variable(y_data)

    # forward
    y_pred = model(x, model_params, test=False)
    l = F.softmax_cross_entropy(y_pred, y)

    # backward
    model.cleargrads()
    l.backward(retain_grad=True)

    # change variable held in model_params
    for k, v in model_params.items():
        w = Variable(np.copy(v.grad))
        w_ = F.dropout(w)
        model_params[k] = w_

    # forward
    y_pred = model(x, model_params, test=False)
    l = F.softmax_cross_entropy(y_pred, y)

    # backward
    model.cleargrads()
    l.backward(retain_grad=True)

    # check
    print("after backward")
    for k, v in model_params.items():
        if v.grad is not None:
            print(k)
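
test_forward() depends on the project's Model accepting an explicit parameter dict, which is what makes the swap-gradients-back-in step possible. Below is a self-contained sketch of the same two-pass pattern, assuming Chainer is installed and using a hand-built parameter dict plus a hypothetical functional forward() in place of the real Model:

from collections import OrderedDict

import numpy as np
import chainer.functions as F
from chainer import Variable


def forward(x, params):
    # Purely functional forward pass: every weight is taken from `params`,
    # which is what lets the test replace parameters between passes.
    h = F.relu(F.linear(x, params["/l0/W"], params["/l0/b"]))
    return F.linear(h, params["/l1/W"], params["/l1/b"])


rng = np.random.RandomState(0)
params = OrderedDict([
    ("/l0/W", Variable(0.01 * rng.randn(32, 3 * 32 * 32).astype(np.float32))),
    ("/l0/b", Variable(np.zeros(32, dtype=np.float32))),
    ("/l1/W", Variable(0.01 * rng.randn(10, 32).astype(np.float32))),
    ("/l1/b", Variable(np.zeros(10, dtype=np.float32))),
])

x = Variable(rng.rand(4, 3 * 32 * 32).astype(np.float32))
y = Variable(rng.randint(0, 10, size=4).astype(np.int32))

# First pass: retain_grad=True keeps gradient arrays on intermediate
# Variables so they can be read back and reused below.
loss = F.softmax_cross_entropy(forward(x, params), y)
loss.backward(retain_grad=True)

# Rebuild the parameter dict from (dropout-transformed) gradients,
# mirroring the "change variable held in model_params" step above.
for k, v in params.items():
    params[k] = F.dropout(Variable(np.copy(v.grad)))

# Second pass through the rebuilt parameters.
loss = F.softmax_cross_entropy(forward(x, params), y)
loss.backward(retain_grad=True)

for k, v in params.items():
    print(k, v.grad is not None)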
Example #3
    def __init__(self, device=None, learning_rate=1e-3, act=F.leaky_relu, T=3):
        # Settings
        self.device = device
        self.act = act
        self.learning_rate = learning_rate
        self.T = T
        self.t = 0
        self.loss_ml = 0

        # Loss
        self.rc_loss = ReconstructionLoss()

        # Model
        from meta_st.cifar10.cnn_model_001 import Model
        self.model = Model(device, act)
        if device is not None:
            self.model.to_gpu(device)
        self.model_params = OrderedDict(self.model.namedparams())

        # Optimizer
        self.optimizer = Adam(learning_rate)  # TODO: is Adam appropriate here?
        self.optimizer.setup(self.model)
        self.optimizer.use_cleargrads()
        self.setup_meta_learners()
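
For the optimizer block, Chainer's Adam takes the step size (alpha) as its first positional argument, and use_cleargrads() is deprecated since Chainer 2, where clearing gradients with cleargrads() is already the default. A minimal, self-contained update step along those lines, again with a hypothetical TinyModel standing in for the project's Model:

import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
from chainer.optimizers import Adam


class TinyModel(chainer.Chain):
    """Hypothetical stand-in for the project's Model."""

    def __init__(self):
        super(TinyModel, self).__init__()
        with self.init_scope():
            self.l0 = L.Linear(None, 10)

    def __call__(self, x):
        return self.l0(x)


model = TinyModel()
optimizer = Adam(1e-3)      # first positional argument is alpha (the step size)
optimizer.setup(model)
optimizer.use_cleargrads()  # deprecated since Chainer 2; kept only to mirror the snippet

x = np.random.rand(4, 3 * 32 * 32).astype(np.float32)
y = np.random.randint(0, 10, size=4).astype(np.int32)

loss = F.softmax_cross_entropy(model(x), y)
model.cleargrads()
loss.backward()
optimizer.update()
print(float(loss.data))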