# Training loop: minimize the AutoExtend loss over the model's embedding
# maps, then persist the learned parameters to disk.
#
# NOTE(review): reconstructed from an unindented source line; statement
# nesting is inferred from semantics (backward/step must run every epoch,
# so only the normalization is guarded by the `if`) — confirm against the
# original. Assumes `epochs`, `optimizer`, `model`, `autoExtend_loss`,
# `last_loss`, `num_words`, `num_classes`, `num_lexemes`, `num_dims`,
# `alpha`, `beta`, `dim_start`, `dim_end` are defined earlier in the file.
for i in range(epochs):
    print("Epoch %i" % i)
    optimizer.zero_grad()
    # Forward pass. Per the original comment, W_out is transposed to
    # match n x W.
    W_out, C = model.forward()
    W_in = model.W_in
    loss = autoExtend_loss(W_out, W_in, model.E, model.D, C,
                           num_words, num_classes, num_lexemes, num_dims,
                           alpha, beta)
    # Legacy PyTorch (<0.4) scalar access, hoisted so the tensor is only
    # indexed once per iteration. On modern PyTorch this would be
    # loss.item().
    cur_loss = loss.data[0]
    print("LOSS: %.4f" % cur_loss)  # fixed format string: "%4f" -> "%.4f"
    # Re-normalize the mapping matrices only when the loss improved.
    if cur_loss < last_loss:  # and i > 5:
        print("Normalizing Maps...")
        model.normalize_columns()
    print("Computing backward...")
    loss.backward()
    optimizer.step()
    last_loss = cur_loss

# Persist learned parameters, tagged with the dimension slice.
W = model.W_in
E = model.E.data
D = model.D.data
torch.save(W, "./parameters/scoped_embeds.%i_%i"
           % (dim_start, dim_end))
torch.save(E, "./parameters/E.%i_%i"
           % (dim_start, dim_end))
# BUG FIX: D was extracted but never saved in the original, unlike its
# siblings W and E.
torch.save(D, "./parameters/D.%i_%i"
           % (dim_start, dim_end))