Example #1
def update_param(data, model, optimizer, compactness, pos_scale, device,
                 disc_loss):
    inputs, labels, spix, spix_num = data

    inputs = inputs.to(device)
    labels = labels.to(device)
    spix = spix.to(device)

    inputs = pos_scale * inputs

    (Q, H, _, _), msf_feature = model(inputs)

    recons_loss = reconstruct_loss_with_cross_etnropy(Q, labels)
    spix_loss = reconstruct_loss_with_cross_etnropy(Q, spix)
    compact_loss = reconstruct_loss_with_mse(Q, inputs, H)
    disc = disc_loss(msf_feature, spix_num)

    # uniform_compactness = uniform_compact_loss(Q, coords.reshape(*coords.shape[:2], -1), H, device=device)

    loss = recons_loss + 0.5 * spix_loss + compactness * compact_loss + disc

    optimizer.zero_grad()  # clear gradients from the previous step
    loss.backward()  # backpropagate to compute gradients
    optimizer.step()  # update the model parameters

    return {
        "loss": loss.item(),
        "spix": spix_loss.item(),
        "reconstruction": recons_loss.item(),
        "compact": compact_loss.item(),
        "disc": disc.item()
    }
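The call below is a minimal sketch of how this variant might be driven from a training loop. The data loader, `disc_loss` module, epoch count, and the hyperparameter values are assumptions made purely for illustration; only the `update_param` signature follows the example above.

import torch

device = "cuda" if torch.cuda.is_available() else "cpu"
model = model.to(device)  # assumed: a model returning ((Q, H, _, _), msf_feature)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

for epoch in range(num_epochs):  # num_epochs, loader and disc_loss defined elsewhere
    for data in loader:  # each batch yields (inputs, labels, spix, spix_num)
        metrics = update_param(data, model, optimizer,
                               compactness=1e-5, pos_scale=2.5,
                               device=device, disc_loss=disc_loss)
    print(f"epoch {epoch}: loss={metrics['loss']:.4f}")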
Example #2
def update_param(data, model, optimizer, compactness, pos_scale, spix_weig,
                 device):
    inputs, labels, spix, _ = data

    inputs = inputs.to(device)
    labels = labels.to(device)
    spix = spix.to(device)

    inputs = pos_scale * inputs

    Q, H, _, _ = model(inputs)

    recons_loss = reconstruct_loss_with_cross_etnropy(Q, labels)
    recons_loss_spix = reconstruct_loss_with_cross_etnropy(Q, spix)
    compact_loss = reconstruct_loss_with_mse(Q, inputs, H)
    # uniform_compactness = uniform_compact_loss(Q, coords.reshape(*coords.shape[:2], -1), H, device=device)

    loss = recons_loss + compactness * compact_loss + spix_weig * recons_loss_spix

    optimizer.zero_grad()  # clear gradients from the previous step
    loss.backward()  # backpropagate to compute gradients
    optimizer.step()  # update the model parameters

    return {
        "loss": loss.item(),
        "reconstruction": recons_loss.item(),
        "compact": compact_loss.item(),
        "spix": recons_loss_spix.item()
    }
Example #3
def update_param(data, model, optimizer, compactness, pos_scale, device):
    inputs, labels, _ = data

    inputs = inputs.to(device)
    labels = labels.to(device)

    inputs = pos_scale * inputs

    Q, H, _, _ = model(inputs)

    recons_loss = reconstruct_loss_with_cross_etnropy(Q, labels)
    compact_loss = reconstruct_loss_with_mse(Q, inputs, H)
    # uniform_compactness = uniform_compact_loss(Q, coords.reshape(*coords.shape[:2], -1), H, device=device)

    loss = recons_loss + compactness * compact_loss

    optimizer.zero_grad()  # clear gradients from the previous step
    loss.backward()  # backpropagate to compute gradients
    optimizer.step()  # update the model parameters

    return {
        "loss": loss.item(),
        "reconstruction": recons_loss.item(),
        "compact": compact_loss.item(),
        "lr": optimizer.state_dict()['param_groups'][0]['lr']
    }
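A side note on the "lr" entry in the dictionary above: the current learning rate can also be read directly from `optimizer.param_groups`, which avoids building the full optimizer `state_dict`. A small self-contained sketch with a throwaway SGD optimizer:

import torch

params = [torch.nn.Parameter(torch.zeros(1))]
optimizer = torch.optim.SGD(params, lr=0.01)

# Both expressions read the learning rate of the first parameter group.
lr_via_state_dict = optimizer.state_dict()['param_groups'][0]['lr']
lr_direct = optimizer.param_groups[0]['lr']
assert lr_via_state_dict == lr_direct == 0.01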
Example #4
def update_param(data, model, optimizer, compactness, color_scale, pos_scale, device):
    inputs, labels = data

    inputs = inputs.to(device)
    labels = labels.to(device)

    height, width = inputs.shape[-2:]

    nspix_per_axis = int(math.sqrt(model.nspix))
    pos_scale = pos_scale * max(nspix_per_axis/height, nspix_per_axis/width)    

    coords = torch.stack(torch.meshgrid(torch.arange(height, device=device),
                                        torch.arange(width, device=device)), 0)
    coords = coords[None].repeat(inputs.shape[0], 1, 1, 1).float()

    inputs = torch.cat([color_scale*inputs, pos_scale*coords], 1)

    Q, H, feat = model(inputs)

    recons_loss = reconstruct_loss_with_cross_etnropy(Q, labels)
    compact_loss = reconstruct_loss_with_mse(Q, coords.reshape(*coords.shape[:2], -1), H)

    loss = recons_loss + compactness * compact_loss

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    return {"loss": loss.item(), "reconstruction": recons_loss.item(), "compact": compact_loss.item()}
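The positional features in this variant are just a pixel-coordinate grid scaled against the superpixel grid. Below is a standalone sketch of that construction with made-up tensor sizes and illustrative scale factors; note that recent PyTorch versions expect an explicit `indexing` argument for `torch.meshgrid`, where `indexing="ij"` reproduces the behaviour relied on above.

import math
import torch

batch, height, width, nspix = 2, 64, 96, 100   # illustrative sizes
color_scale, pos_scale = 0.26, 2.5             # illustrative scale factors
images = torch.rand(batch, 3, height, width)

nspix_per_axis = int(math.sqrt(nspix))
pos_scale = pos_scale * max(nspix_per_axis / height, nspix_per_axis / width)

# (2, H, W) grid of (row, col) coordinates for every pixel.
coords = torch.stack(torch.meshgrid(torch.arange(height),
                                    torch.arange(width),
                                    indexing="ij"), 0)
coords = coords[None].repeat(batch, 1, 1, 1).float()

# Concatenate scaled colour and position channels: (batch, 3 + 2, H, W).
features = torch.cat([color_scale * images, pos_scale * coords], 1)
print(features.shape)  # torch.Size([2, 5, 64, 96])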
Example #5
def update_param(data,
                 model,
                 optimizer,
                 compactness,
                 pos_scale,
                 device,
                 disc=None):
    inputs, labels, labels_num = data

    inputs = inputs.to(device)
    labels = labels.to(device)

    inputs = pos_scale * inputs

    (Q, H, _, _), mfa = model(inputs)

    recons_loss = reconstruct_loss_with_cross_etnropy(Q, labels)
    compact_loss = reconstruct_loss_with_mse(Q, inputs, H)
    # uniform_compactness = uniform_compact_loss(Q, coords.reshape(*coords.shape[:2], -1), H, device=device)

    loss = recons_loss + compactness * compact_loss
    if disc is not None:
        disc_loss = disc(mfa, labels_num)
        loss = loss + 1e-3 * disc_loss
    else:
        disc_loss = None

    optimizer.zero_grad()  # clear gradients from the previous step
    loss.backward()  # backpropagate to compute gradients
    optimizer.step()  # update the model parameters
    lr = optimizer.state_dict()['param_groups'][0]['lr']
    return {
        "loss": loss.item(),
        "reconstruction": recons_loss.item(),
        "compact": compact_loss.item(),
        "disc": disc_loss,
        "lr": lr
    }
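The optional `disc` argument only needs to be a callable that takes the intermediate features and the number of labels and returns a scalar loss tensor. The module below is a hypothetical stand-in with that call signature, useful for exercising the `disc is not None` branch; it is not the discriminative loss actually used with this code.

import torch

class DummyDiscLoss(torch.nn.Module):
    """Hypothetical placeholder: only the (features, labels_num) signature matters."""

    def forward(self, features, labels_num):
        # Ignore labels_num and return any scalar that keeps the autograd graph alive.
        return features.float().pow(2).mean()

# e.g. update_param(data, model, optimizer, compactness, pos_scale, device,
#                   disc=DummyDiscLoss())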