Example 1
    def train(self, dataloader):
        for epoch in range(self.args.epochs):
            for idx, (x, y) in enumerate(dataloader):
                x, y = x.to(self.device), y.to(self.device)
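                # the dataset labels are unused (unconditional GAN); y is reused below for D's output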
                # train D with real
                self.D.zero_grad()
                minibs = x.size(0)
                label = torch.full((minibs, ),
                                   self.real_label,
                                   dtype=torch.float,
                                   device=self.device)
                y = self.D(x)
                loss_real = self.criterion(y, label)
                loss_real.backward()
                D_real_avg = y.mean().item()

                # train D with fake
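                # D.zero_grad() is not called again here, so the gradients from the
                # real and fake batches accumulate before optimizerD.step()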
                noise = torch.randn(minibs, self.args.rvs, 1,
                                    1).to(self.device)
                label.fill_(self.fake_label)
                fake_y = self.G(noise)
                # print(fake_y.shape)
                y = self.D(fake_y.detach())
                # print(y.shape)
                loss_fake = self.criterion(y, label)
                loss_fake.backward()
                D_fake_avg = y.mean().item()
                loss_D = loss_real + loss_fake
                self.optimizerD.step()

                # train G
                self.G.zero_grad()
                label.fill_(self.real_label)
                y = self.D(fake_y)
                loss_G = self.criterion(y, label)
                loss_G.backward()
                G_avg = y.mean().item()
                self.optimizerG.step()

                # print log
                if idx % 100 == 0:
                    print(
                        'Epoch [{}/{}], [{}/{}], Loss_D: {:.4f}, Loss_G: {:.4f}, D(x): '
                        '{:.4f}, D(G(z)): {:.4f}/{:.4f}'.format(
                            epoch, self.args.epochs, idx, len(dataloader),
                            loss_D.item(), loss_G.item(), D_real_avg, G_avg,
                            D_fake_avg))
                    get_grid(imgs=self.G(
                        self.test_noise).detach().cpu().numpy(),
                             args=self.args,
                             epoch=epoch,
                             it=idx)
        self.save()
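
The util.get_grid helper imported in these image examples is not shown. As a rough sketch only, an image-grid saver with the same call signature could look like the following (the output directory, file name, and grid layout are assumptions, not the projects' actual code):

# hypothetical stand-in for util.get_grid(imgs, args, epoch, it) as called above;
# the real helper may differ
import os

import torch
from torchvision.utils import save_image


def get_grid(imgs, args, epoch, it):
    # imgs: array or tensor of shape (N, C, H, W), normalized to [-1, 1];
    # args is accepted to match the call sites, a real helper would likely read
    # the output path from it
    grid = torch.as_tensor(imgs) * 0.5 + 0.5  # undo Normalize((0.5,), (0.5,))
    os.makedirs('./log', exist_ok=True)
    save_image(grid, os.path.join('./log', 'epoch{}_it{}.png'.format(epoch, it)),
               nrow=8)
    return grid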
Example 2
def get_feasible_centers_brute(pivot, pi_hat, mu_hat, S_hat, Rho,
                               base_intervals):
    pivot_cluster, pivot_dim = pivot
    dim = mu_hat.shape[0]

    pi_hat_pivot = pi_hat[pivot_cluster]
    mu_hat_pivot = mu_hat[pivot_dim]

    pi_hat_other = pi_hat[1 - pivot_cluster]

    def is_base_feasible(center, b, j):
        return (center >= base_intervals[b][j].lo
                and center <= base_intervals[b][j].hi)

    def other_center(center, prob, mean_hat):
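        # solves the mixture-mean constraint prob * center + (1 - prob) * other = mean_hat
        # for the other cluster's center in the given dimension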
        return (mean_hat - prob * center) / (1 - prob)

    pivot_interval = get_grid(
        get_first_order_constraints(pi_hat_pivot, mu_hat_pivot), 1000)
    feasible_solutions = []
    for pivot_center in pivot_interval:
        candidate_solution = np.zeros((2, dim))
        candidate_solution[pivot_cluster, pivot_dim] = pivot_center
        candidate_solution[1 - pivot_cluster,
                           pivot_dim] = other_center(pivot_center,
                                                     pi_hat_pivot,
                                                     mu_hat_pivot)

        feasible = True
        for j in range(dim):
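            # for each non-pivot dimension, solve the cross-moment constraint (the
            # formula from get_second_order_constraints in Example 3) for the other
            # cluster's center and check that it stays inside its base interval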
            if j == pivot_dim:
                continue

            jth_center = ((S_hat[pivot_dim, j] - Rho[pivot_dim, j] -
                           mu_hat[j] * pivot_center) /
                          (mu_hat_pivot - pivot_center))
            if not is_base_feasible(jth_center, 1 - pivot_cluster, j):
                feasible = False
                break
            else:
                candidate_solution[1 - pivot_cluster, j] = jth_center
                candidate_solution[pivot_cluster,
                                   j] = other_center(pivot_center,
                                                     pi_hat_other, mu_hat[j])

        if feasible:
            feasible_solutions.append(candidate_solution)

    return feasible_solutions
Example 3
def get_second_order_constraints(S, rho, mu_2dim, other_interval):
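    # evaluate the cross-moment constraint over a grid of the other center's
    # candidate values and return the clipped range it implies for this center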
    other_grid = get_grid(other_interval)
    this_grid = (S - rho - mu_2dim[1] * other_grid) / (mu_2dim[0] - other_grid)

    return clip(Interval(lo=this_grid.min(), hi=this_grid.max()))
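
Examples 2 and 3 rely on a different get_grid: one that returns evenly spaced points across an Interval. A minimal sketch under that assumption (Interval.lo / Interval.hi are taken from Example 2; the default point count is a guess):

# hypothetical numeric variant of get_grid used by the constraint examples
import numpy as np


def get_grid(interval, num=1000):
    # evenly spaced candidate values spanning [interval.lo, interval.hi]
    return np.linspace(interval.lo, interval.hi, num)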
Example 4
            optimizer.step()

            if (idx + 1) % 100 == 0:
                print(
                    'Epoch: [{}/{}], Step: [{}/{}], Loss: {:.4f}, Acc: {:.4f}'.
                    format(epoch + 1, args.epochs, idx + 1, len(train_loader),
                           loss.item(), 100 * correct / total))

    # save the model parameters
    torch.save(
        model.state_dict(),
        os.path.join(
            './log', '{}_{}_{}.ckpt'.format(args.model, args.dataset,
                                            args.epochs)))
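    # evaluate on the test set with gradients disabled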
    model.eval()
    with torch.no_grad():
        correct = 0
        total = 0
        for idx, (x, y) in enumerate(test_loader):
            x, y = x.to(device), y.to(device)
            y_pred = model(x)
            _, y_pred = torch.max(y_pred.data, 1)
            total += y.size(0)
            correct += (y_pred == y).sum().item()
            # print(result)
            if idx % 100 == 0:
                get_grid(x.cpu().numpy(), args, args.epochs, idx)
                print(y_pred.data.cpu().numpy(), y.data.cpu().numpy())
        print('Test Acc: {:.4f}%, Model: {}, Epochs: {}'.format(
            correct / total * 100, args.model, args.epochs))
Example 5
import torch
from torchvision import datasets, transforms


def get_data(args):
    train_loader = torch.utils.data.DataLoader(datasets.MNIST(
        args.root,
        train=True,
        download=True,
        transform=transforms.Compose(
            [transforms.ToTensor(),
             transforms.Normalize((0.5, ), (0.5, ))])),
                                               batch_size=args.bs,
                                               shuffle=True)
    test_loader = torch.utils.data.DataLoader(datasets.MNIST(
        args.root,
        train=False,
        transform=transforms.Compose(
            [transforms.ToTensor(),
             transforms.Normalize((0.5, ), (0.5, ))])),
                                              batch_size=args.bs,
                                              shuffle=True)
    return train_loader, test_loader


from options import get_args
from util import get_grid

if __name__ == '__main__':
    args = get_args()
    # print(args.bs)
    # print(args.root)
    train_loader, test_loader = get_data(args=args)
    for idx, (x, y) in enumerate(train_loader):
        # print(x)
        # print(x.shape)
        imgs = get_grid(x, args, 1, idx)
        # imgs.show()
Example 6
def reconstruct(dmap, kmap, mask_r, pl=10, ps=4):
  h, w, _ = dmap.shape
  grid_h, grid_w = util.get_grid(h, w)
  connections = util.get_connections()
  limbs = util.get_limbs()
  patch = util.get_patch(pl, ps)

  humans = [] # store the annotation for each human
  q = queue.Queue() # store the layers to be extended from
  used = [False] * 14 # store whether the layer has been explored
  starters, layer = find_outstander(kmap, mask_r, patch)
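  # find_outstander returns the starting keypoint layer and its detected
  # keypoints; each keypoint seeds one human candidate below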

  for p in starters:
    dic = {}
    dic[layer] = p
    humans.append(dic)

  q.put(layer)
  used[layer] = True

  while True:
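    # breadth-first pass over the limb-connection graph: match the keypoints of
    # each dequeued layer to the next connected layer via the displacement maps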
    if q.empty():
      break
    layer1 = q.get()
    starters = []
    for i,h in enumerate(humans):
      if layer1 in h.keys():
        starters.append((h[layer1], i))
    for piece in connections[layer1]:
      layer2, sign, d_layer = piece
      if used[layer2]:
        continue
      used[layer2] = True
      q.put(layer2)
      k_slice = kmap[:,:,layer2]

      # in limbs, the displacement vector points from START to END;
      # in connections, sign == -1 means layer2 is the START joint.
      # "forward" is the displacement from layer1 towards layer2,
      # "backward" is the displacement from layer2 towards layer1,
      # and d_layer indexes the channel pair that stores the START-to-END vector.

      # if sign == 1, layer1 is START and layer2 is END,
      # so dx/dy_forward already point from layer1 to layer2:
      dx_forward = dmap[:,:,d_layer*2]
      dy_forward = dmap[:,:,d_layer*2+1]
      dx_backward = dmap[:,:,d_layer*2+26]
      dy_backward = dmap[:,:,d_layer*2+26+1]

      # if sign == -1, layer1 is END and layer2 is START, so swap the maps
      # to keep dx/dy_forward pointing from layer1 to layer2:
      if sign == -1:
        dx_forward, dx_backward = dx_backward, dx_forward
        dy_forward, dy_backward = dy_backward, dy_forward

      mask_log = group_match(starters, dx_forward, dy_forward, dx_backward, dy_backward, grid_h, grid_w, k_slice, mask_r, patch)
      trans_mask_log(mask_log, layer2, humans)

  return humans
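
Example 6 uses yet another util.get_grid, one that takes a map height and width and returns a pair of coordinate grids. A minimal sketch under that assumption (the real helper may differ):

# hypothetical coordinate-grid variant of util.get_grid(h, w)
import numpy as np


def get_grid(h, w):
    # grid_h[i, j] == i and grid_w[i, j] == j for every pixel of an (h, w) map
    grid_h, grid_w = np.meshgrid(np.arange(h), np.arange(w), indexing='ij')
    return grid_h, grid_w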
Example 7
import os

import numpy as np
import torch
from torchvision import transforms
from torchvision.datasets import ImageFolder


def get_data(args):
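    # ImageFolder expects args.root to contain one sub-directory per class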
    train_dataset = ImageFolder(args.root, transform=transforms.Compose([
        transforms.Resize((32, 32)),
        transforms.CenterCrop((32, 32)),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    ]))
    train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=args.bs, shuffle=True)
    test_loader = []
    return train_loader, test_loader


from options import get_args
from util import get_grid

if __name__ == '__main__':
    args = get_args()
    # print(args.bs)
    # print(args.root)
    train_loader, test_loader = get_data(args=args)
    for idx, (x, y) in enumerate(train_loader):
        # print(x)
        # print(x.shape)
        imgs = get_grid(x.numpy(), args, 1, idx)
        # imgs.show()
Example 8
                if idx % 100 == 0:
                    print(
                        'Epoch [{}/{}], [{}/{}], Loss_D: {:.4f}, Loss_G: {:.4f}, D(x): '
                        '{:.4f}, D(G(z)): {:.4f}/{:.4f}'.format(
                            epoch, self.args.epochs, idx, len(dataloader),
                            loss_D.item(), loss_G.item(), D_real_avg, G_avg,
                            D_fake_avg))
                    get_grid(imgs=self.G(
                        self.test_noise).detach().cpu().numpy(),
                             args=self.args,
                             epoch=epoch,
                             it=idx)
        self.save()

    def test(self, dataloader):
        pass

    def simple(self):
        pass


from options import get_args
from util import get_grid

if __name__ == '__main__':
    args = get_args()
    model = GAN(args=args)
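    # smoke test: push random latent vectors through the untrained generator and
    # hand the resulting batch to get_grid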
    noise = torch.randn(args.bs, args.rvs, 1, 1).to(model.device)
    gg = model.G(noise)
    print(gg.shape)
    get_grid(imgs=gg.detach().cpu().numpy(), args=args, epoch=1, it=1)
    # print(model.D(gg))