def _post_clone_action(self, **kwargs):
    """Create and persist a copy of the model selected by **kwargs.

    The clone's name is the source name plus a timestamp. It shares the
    source's classifier, feature set and tags; tag usage counters are
    refreshed afterwards. Responds with the new model and HTTP 201.
    """
    from datetime import datetime

    source = self._get_details_query(None, **kwargs)
    clone_name = "{0} clone: {1}".format(
        source.name, datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    clone = Model(
        name=clone_name,
        train_import_handler=source.train_import_handler,
        test_import_handler=source.test_import_handler,
    )
    # Saved once here and once after wiring relations — original order kept.
    clone.save()
    clone.classifier = source.classifier
    clone.features_set.from_dict(source.features_set.features, commit=False)
    clone.tags = source.tags
    clone.save()
    for tag in clone.tags:
        tag.update_counter()
    return self._render(
        {
            self.OBJECT_NAME: clone,
            'status': 'New model "{0}" created'.format(clone.name),
        },
        code=201)
# --- deterministic 80/20 train/test split over 600 graphs ---------------
idx = np.arange(600)
np.random.RandomState(seed=124).shuffle(idx)
idx_train, idx_test = idx[:480], idx[480:]

model = Model(pool_size=int(max_num_nodes * 0.25), device=device).to(device)
model.train()
optimizer = optim.Adam(model.parameters())
# Hoisted out of the loop: the loss module is stateless, no need to
# rebuild it per sample.
criterion = nn.CrossEntropyLoss()

for e in tqdm(range(args.epochs)):
    pred_labels = []
    # Renamed from `idx` to avoid shadowing the shuffled index array above.
    for i, g in enumerate(idx_train):
        adj_train = torch.from_numpy(adj[g]).to(device).float()
        features_train = torch.from_numpy(features[g]).to(device).float()
        # BUG FIX: targets must be on the same device as the model output,
        # otherwise CrossEntropyLoss raises when device is CUDA.
        labels_train = labels[g].view(-1).long().to(device)

        graph_feat = model(features_train, adj_train)
        output = model.classifier(graph_feat)
        if args.link_pred:
            loss = criterion(output, labels_train) + model.link_pred_loss()
        else:
            loss = criterion(output, labels_train)
        loss.backward()  # gradients accumulate across samples
        pred_labels.append(output.argmax())

        # Gradient accumulation: apply an update every 32 graphs.
        # NOTE(review): `i % 32 == 0` also fires at i == 0, i.e. the first
        # update uses a single sample — kept as-is to preserve behavior;
        # confirm whether `(i + 1) % 32 == 0` was intended.
        if i % 32 == 0:
            optimizer.step()
            optimizer.zero_grad()
    # Flush gradients left over from the final partial window.
    optimizer.step()
    optimizer.zero_grad()

    pred_labels = torch.stack(pred_labels, dim=0)
    # BUG FIX: bring predictions back to CPU before comparing with the
    # (CPU-resident) label tensor; a no-op when device is already CPU.
    acc = (pred_labels.long().cpu() == labels[idx_train].long()).float().mean()