# Example 1
    def fit(self, X):
        """Train the GAN on ``X`` and periodically save sample grids.

        ``X`` is either an array of images or a list of image file paths;
        string entries are loaded per batch via ``util.files2images``.
        Runs one discriminator update and two generator updates per batch,
        writes an 8x8 grid of generated samples every SAVE_SAMPLE_PERIOD
        iterations, and saves a cost-vs-iteration plot at the end.
        """
        d_costs = []
        g_costs = []

        N = len(X)
        n_batches = N // BATCH_SIZE
        total_iters = 0
        for i in range(EPOCHS):
            print("epoch: ", i)
            np.random.shuffle(X)

            for j in range(n_batches):
                t0 = datetime.now()

                if type(X[0]) is str:
                    # X holds filenames; load this batch's images from disk
                    batch = util.files2images(
                        X[j * BATCH_SIZE:(j + 1) * BATCH_SIZE])
                else:
                    # X already holds image data
                    batch = X[j * BATCH_SIZE:(j + 1) * BATCH_SIZE]

                Z = np.random.uniform(-1,
                                      1,
                                      size=(BATCH_SIZE, self.latent_dims))

                # one discriminator update
                _, d_cost, d_acc = self.sess.run(
                    (self.d_train_op, self.d_cost, self.d_accuracy),
                    feed_dict={
                        self.X: batch,
                        self.Z: Z
                    })
                d_costs.append(d_cost)

                # two generator updates per discriminator update; record avg
                _, g_cost1 = self.sess.run((self.g_train_op, self.g_cost),
                                           feed_dict={self.Z: Z})
                _, g_cost2 = self.sess.run((self.g_train_op, self.g_cost),
                                           feed_dict={self.Z: Z})
                g_costs.append((g_cost1 + g_cost2) / 2)

                # BUG FIX: the format string has four specifiers but only
                # three values were supplied (d_acc was fetched yet never
                # passed), raising TypeError on every batch. Pass d_acc too.
                print("batch %d/%d - dt: %s - d_acc: %.2f" %
                      (j + 1, n_batches, datetime.now() - t0, d_acc))

                total_iters += 1
                if total_iters % SAVE_SAMPLE_PERIOD == 0:
                    print("saving sample...")
                    samples = self.sample(64)

                    d = self.img_length

                    if samples.shape[-1] == 1:
                        # grayscale: tile 64 (d x d) samples into an 8x8 grid
                        samples = samples.reshape(64, d, d)
                        flat_image = np.empty((8 * d, 8 * d))

                        k = 0
                        for a in range(8):
                            for b in range(8):
                                flat_image[a * d:(a + 1) * d, b * d:(b + 1) *
                                           d] = samples[k].reshape(d, d)
                                k += 1
                    else:
                        # color: tile into an 8x8 grid keeping the 3 channels
                        flat_image = np.empty((8 * d, 8 * d, 3))
                        k = 0
                        for a in range(8):
                            for b in range(8):
                                flat_image[a * d:(a + 1) * d,
                                           b * d:(b + 1) * d] = samples[k]
                                k += 1

                    # samples appear to be in [-1, 1] (Z ~ U(-1,1) with a
                    # tanh-style output assumed); rescale to [0, 1] to save.
                    # NOTE(review): scipy.misc.imsave was removed in SciPy
                    # 1.2 — confirm the pinned SciPy version still has it.
                    sp.misc.imsave(
                        'samples/samples_at_iter_%d.png' % total_iters,
                        (flat_image + 1) / 2)

        # save a plot of both cost curves over training
        plt.clf()
        plt.plot(d_costs, label='discriminator cost')
        plt.plot(g_costs, label='generator cost')
        plt.legend()
        plt.savefig('cost_vs_iteration.png')
# Example 2
    def fit(self, X):
        """Train the GAN on ``X`` and periodically save sample grids.

        ``X`` is either an array of images (mnist) or a list of image file
        paths (celeb); string entries are loaded per batch via
        ``util.files2images``. Runs one discriminator update and two
        generator updates per batch, writes an 8x8 grid of generated
        samples every SAVE_SAMPLE_PERIOD iterations, and saves a
        cost-vs-iteration plot at the end.
        """
        d_costs = []
        g_costs = []

        N = len(X)
        n_batches = N // BATCH_SIZE
        total_iters = 0
        for i in range(EPOCHS):
            print("epoch:", i)
            np.random.shuffle(X)
            for j in range(n_batches):
                t0 = datetime.now()

                if type(X[0]) is str:
                    # is celeb dataset: X holds filenames; load this batch
                    batch = util.files2images(X[j * BATCH_SIZE:(j + 1) *
                                                BATCH_SIZE])

                else:
                    # is mnist dataset: X already holds image data
                    batch = X[j * BATCH_SIZE:(j + 1) * BATCH_SIZE]

                Z = np.random.uniform(-1,
                                      1,
                                      size=(BATCH_SIZE, self.latent_dims))

                # train the discriminator
                _, d_cost, d_acc = self.sess.run(
                    (self.d_train_op, self.d_cost, self.d_accuracy),
                    feed_dict={
                        self.X: batch,
                        self.Z: Z,
                        self.batch_sz: BATCH_SIZE
                    },
                )
                d_costs.append(d_cost)

                # train the generator twice per discriminator step
                _, g_cost1 = self.sess.run(
                    (self.g_train_op, self.g_cost),
                    feed_dict={
                        self.Z: Z,
                        self.batch_sz: BATCH_SIZE
                    },
                )
                _, g_cost2 = self.sess.run(
                    (self.g_train_op, self.g_cost),
                    feed_dict={
                        self.Z: Z,
                        self.batch_sz: BATCH_SIZE
                    },
                )
                g_costs.append((g_cost1 + g_cost2) / 2)  # just use the avg

                print("  batch: %d/%d  -  dt: %s - d_acc: %.2f" %
                      (j + 1, n_batches, datetime.now() - t0, d_acc))

                # save samples periodically
                total_iters += 1
                if total_iters % SAVE_SAMPLE_PERIOD == 0:
                    print("saving a sample...")
                    samples = self.sample(64)  # shape is (64, D, D, color)

                    # for convenience
                    d = self.img_length

                    # FIX: the tiling loops below previously reused `i` and
                    # `j`, silently clobbering the epoch counter and batch
                    # index of the enclosing loops; use row/col instead.
                    if samples.shape[-1] == 1:
                        # if color == 1, we want a 2-D image (N x N)
                        samples = samples.reshape(64, d, d)
                        flat_image = np.empty((8 * d, 8 * d))

                        k = 0
                        for row in range(8):
                            for col in range(8):
                                flat_image[row * d:(row + 1) * d,
                                           col * d:(col + 1) *
                                           d] = samples[k].reshape(d, d)
                                k += 1
                    else:
                        # if color == 3, we want a 3-D image (N x N x 3)
                        flat_image = np.empty((8 * d, 8 * d, 3))
                        k = 0
                        for row in range(8):
                            for col in range(8):
                                flat_image[row * d:(row + 1) * d,
                                           col * d:(col + 1) * d] = samples[k]
                                k += 1

                    # NOTE(review): scipy.misc.imsave was removed in SciPy
                    # 1.2 — confirm the pinned SciPy version still has it.
                    sp.misc.imsave(
                        'samples/samples_at_iter_%d.png' % total_iters,
                        flat_image,
                    )

        # save a plot of the costs
        plt.clf()
        plt.plot(d_costs, label='discriminator cost')
        plt.plot(g_costs, label='generator cost')
        plt.legend()
        plt.savefig('cost_vs_iteration.png')
  def fit(self, X):
    """Train the GAN on ``X`` and periodically save sample grids.

    ``X`` is either an array of images (mnist) or a list of image file
    paths (celeb); string entries are loaded per batch via
    ``util.files2images``. Runs one discriminator update and two generator
    updates per batch, writes an 8x8 grid of generated samples every
    SAVE_SAMPLE_PERIOD iterations, and saves a cost plot at the end.
    """
    d_costs = []
    g_costs = []

    N = len(X)
    n_batches = N // BATCH_SIZE
    total_iters = 0
    for i in range(EPOCHS):
      print("epoch:", i)
      np.random.shuffle(X)
      for j in range(n_batches):
        t0 = datetime.now()

        if type(X[0]) is str:
          # is celeb dataset: X holds filenames; load this batch from disk
          batch = util.files2images(
            X[j*BATCH_SIZE:(j+1)*BATCH_SIZE]
          )

        else:
          # is mnist dataset: X already holds image data
          batch = X[j*BATCH_SIZE:(j+1)*BATCH_SIZE]

        Z = np.random.uniform(-1, 1, size=(BATCH_SIZE, self.latent_dims))

        # train the discriminator
        _, d_cost, d_acc = self.sess.run(
          (self.d_train_op, self.d_cost, self.d_accuracy),
          feed_dict={self.X: batch, self.Z: Z, self.batch_sz: BATCH_SIZE},
        )
        d_costs.append(d_cost)

        # train the generator twice per discriminator step
        _, g_cost1 = self.sess.run(
          (self.g_train_op, self.g_cost),
          feed_dict={self.Z: Z, self.batch_sz: BATCH_SIZE},
        )
        _, g_cost2 = self.sess.run(
          (self.g_train_op, self.g_cost),
          feed_dict={self.Z: Z, self.batch_sz: BATCH_SIZE},
        )
        g_costs.append((g_cost1 + g_cost2)/2) # just use the avg

        print("  batch: %d/%d  -  dt: %s - d_acc: %.2f" % (j+1, n_batches, datetime.now() - t0, d_acc))


        # save samples periodically
        total_iters += 1
        if total_iters % SAVE_SAMPLE_PERIOD == 0:
          print("saving a sample...")
          samples = self.sample(64) # shape is (64, D, D, color)

          # for convenience
          d = self.img_length

          # FIX: the tiling loops below previously reused `i` and `j`,
          # silently clobbering the epoch counter and batch index of the
          # enclosing loops; use row/col instead.
          if samples.shape[-1] == 1:
            # if color == 1, we want a 2-D image (N x N)
            samples = samples.reshape(64, d, d)
            flat_image = np.empty((8*d, 8*d))

            k = 0
            for row in range(8):
              for col in range(8):
                flat_image[row*d:(row+1)*d, col*d:(col+1)*d] = samples[k].reshape(d, d)
                k += 1
          else:
            # if color == 3, we want a 3-D image (N x N x 3)
            flat_image = np.empty((8*d, 8*d, 3))
            k = 0
            for row in range(8):
              for col in range(8):
                flat_image[row*d:(row+1)*d, col*d:(col+1)*d] = samples[k]
                k += 1

          # NOTE(review): scipy.misc.imsave was removed in SciPy 1.2 —
          # confirm the pinned SciPy version still has it.
          sp.misc.imsave(
            'samples/samples_at_iter_%d.png' % total_iters,
            flat_image,
          )

    # save a plot of the costs
    plt.clf()
    plt.plot(d_costs, label='discriminator cost')
    plt.plot(g_costs, label='generator cost')
    plt.legend()
    plt.savefig('cost_vs_iteration.png')