示例#1
0
        # Compute distances and output
        dis = sed(x_map, protos_map)
        return dis, plabels


# Instantiate the GLVQ model
model = Model()

# Show the model architecture
print(model)

# Adam optimizer with a step-decay learning-rate schedule
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=75, gamma=0.1)
criterion = GLVQLoss(squashing="identity", beta=10)

# Training loop. Gradients are accumulated across every mini-batch and a
# single optimizer step is taken per epoch (full-batch descent via
# accumulation) -- hence zero_grad() before the batch loop and step() after.
for epoch in range(150):
    running_loss = 0.0  # reset the per-epoch loss tally
    optimizer.zero_grad()  # clear accumulated gradients once per epoch
    for batch_x, batch_y in train_loader:
        # Forward pass: distances to prototypes plus the prototype labels
        dists, proto_labels = model(batch_x)
        batch_loss = criterion([dists, proto_labels], batch_y)
        running_loss += batch_loss.item()
        # Accumulate gradients over the whole epoch
        batch_loss.backward()
    # One descent step per epoch, then advance the LR schedule
    optimizer.step()
    scheduler.step()
示例#2
0
        eye_(self.omegas)

    def forward(self, x):
        """Map a batch `x` to (distances, prototype_labels).

        Distances are computed by `lomega_distance` from the prototypes of
        layer `p1` and the per-prototype omega matrices held on the model.
        """
        labels = self.p1.prototype_labels
        distances = lomega_distance(x, self.p1.prototypes, self.omegas)
        return distances, labels


# Build the model
model = Model()

# Optimize using Adam optimizer from `torch.optim`
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
criterion = GLVQLoss(squashing="sigmoid_beta", beta=10)

# Wrap the raw training arrays as float tensors for the model
x_in = torch.Tensor(x_train)
y_in = torch.Tensor(y_train)

# Training loop (full-batch: the entire training set is fed through the
# model every epoch)
title = "Prototype Visualization"
fig = plt.figure(title)
for epoch in range(100):
    # Forward pass: per-prototype distances and the prototype labels
    dis, plabels = model(x_in)
    loss = criterion([dis, plabels], y_in)
    # Predicted class via argmin over axis 1 -- presumably `stratified_min`
    # reduces the distances per class; verify against its definition.
    y_pred = np.argmin(stratified_min(dis, plabels).detach().numpy(), axis=1)
    acc = accuracy_score(y_train, y_pred)
    log_string = f"Epoch: {epoch + 1:03d} Loss: {loss.item():05.02f} "
    log_string += f"Acc: {acc * 100:05.02f}%"
    # NOTE(review): the loop body appears truncated here -- no backward()
    # or optimizer.step() is visible in this chunk; confirm against the
    # full file.
示例#3
0
                               nclasses=3,
                               prototype_initializer='zeros')

    def forward(self, x):
        """Return (distances, prototype_labels) for a batch `x`.

        Distances are plain Euclidean distances from `x` to the prototypes
        of layer `p1`.
        """
        labels = self.p1.prototype_labels
        return euclidean_distance(x, self.p1.prototypes), labels


# Build the GLVQ model
model = Model()

# Optimize using SGD optimizer from `torch.optim`
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
criterion = GLVQLoss(squashing='sigmoid_beta', beta=10)

# Training loop (full-batch: the whole training set is passed each epoch)
fig = plt.figure('Prototype Visualization')
for epoch in range(70):
    # Forward pass: distances to prototypes plus the prototype labels.
    # NOTE(review): torch.tensor(x_train)/torch.tensor(y_train) are rebuilt
    # every iteration -- loop-invariant; could be hoisted above the loop.
    distances, plabels = model(torch.tensor(x_train))
    loss = criterion([distances, plabels], torch.tensor(y_train))
    print(f'Epoch: {epoch + 1:03d} Loss: {loss.item():02.02f}')

    # Take a gradient descent step
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    # Get the prototypes from the model
    # NOTE(review): the loop body continues past the end of this chunk.