Example #1
0
 def __init__(self, gan, samples_per_row=8):
     """Pin the sample batch to copies of the first real input image."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # Fetch one concrete batch of real inputs from the session.
     batch_x = gan.session.run(gan.inputs.x)
     batch_size = batch_x.shape[0]
     # Keep only the first image (leading batch axis of 1), then repeat
     # it so the whole batch shows the same x.
     first = batch_x[0:1, :, :, :]
     self.x = np.tile(first, [batch_size, 1, 1, 1])
    def __init__(self, gan, samples_per_row=4, session=None):
        """Walk latent space along a singular direction of the latent parameters.

        Computes an SVD of the stacked latent parameter matrices and uses a
        right-singular vector (column of V) as the walk direction starting
        from latent1.
        """
        BaseSampler.__init__(self, gan, samples_per_row)
        # Two fixed latent draws; pos starts at latent1.
        self.latent1 = self.gan.latent.next()
        self.latent2 = self.gan.latent.next()
        # Per-step speed (2 units over 30 steps -- presumably paired with
        # self.steps below; confirm against the sampling loop).
        self.velocity = 2 / 30.0
        direction = self.gan.latent.next()
        self.origin = direction
        self.pos = self.latent1
        self.hardtanh = nn.Hardtanh()
        g_params = self.gan.latent_parameters()
        # If each parameter holds half the latent width, pair the first and
        # second halves of the list back together along dim 1.
        if self.latent1.shape[1] // 2 == g_params[0].shape[1]:
            #recombine a split
            g_params = [
                torch.cat([p1, p2], 1)
                for p1, p2 in zip(g_params[:len(g_params) //
                                           2], g_params[len(g_params) // 2:])
            ]

        # Right-singular vectors of the stacked latent parameters.
        self.eigvec = torch.svd(torch.cat(g_params, 0)).V
        #self.eigvec = torch.svd(list(self.gan.g_parameters())[0]).V
        self.index = 0
        # Unit-normalized direction taken from the selected singular vector.
        self.direction = self.eigvec[:, self.index].unsqueeze(0)
        self.direction = self.direction / torch.norm(self.direction)
        self.ones = torch.ones_like(self.direction, device="cuda:0")
        # NOTE(review): this half-zeros/half-ones mask is immediately
        # overwritten by all-ones below, so its construction is dead code --
        # confirm whether the split mask was meant to be kept.
        self.mask = torch.cat([
            torch.zeros([1, direction.shape[1] // 2]),
            torch.ones([1, direction.shape[1] // 2])
        ],
                              dim=1).cuda()
        self.mask = torch.ones_like(self.mask).cuda()
        self.steps = 30
Example #3
0
    def __init__(self, gan, samples_per_row=8):
        """Build one IdentitySampler per named generator layer, upscaled to 256x256."""
        BaseSampler.__init__(self, gan, samples_per_row)
        x_t = gan.inputs.x
        # NOTE: the fetched input batch is stored in a module-level global.
        global x
        x = gan.session.run(x_t)

        self.samplers = []
        # Fallback image used when a named layer is absent from the generator.
        default = tf.zeros_like(gan.generator.sample)

        for name in ('g8x8', 'g16x16', 'g32x32', 'g64x64', 'g128x128',
                     'g256x256'):
            layer = gan.generator.layer(name)
            if layer is None:
                layer = default
            resized = tf.image.resize_images(layer, [256, 256], method=1)
            self.samplers.append(IdentitySampler(gan, resized, 1))
 def __init__(self, gan, samples_per_row=8):
     """Initialize empty interpolation state; samples are filled in lazily."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # Lazily-populated sample state.
     self.z = self.y = self.x = None
     self.target = None
     # Interpolation progress: current step out of `steps`.
     self.step = 0
     self.steps = 30
Example #5
0
 def __init__(self, gan, samples_per_row=8):
     """Grid sampler: 4 rows by 8 columns of generator output."""
     BaseSampler.__init__(self, gan, samples_per_row)
     self.z = self.y = self.x = None
     # Generator output tensor; replace_none is provided by the sampler
     # base class -- presumably substitutes a default when None; confirm.
     self.g_t = self.replace_none(gan.generator.sample)
     self.rows = 4
     self.columns = 8
 def __init__(self, gan, samples_per_row=8):
     """Set up a 4x8 sample grid over the generator output."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # No cached samples yet.
     self.z = self.y = self.x = None
     # replace_none comes from the sampler base class -- confirm semantics.
     self.g_t = self.replace_none(gan.generator.sample)
     self.rows = 4
     self.columns = 8
Example #7
0
 def __init__(self, gan, samples_per_row=8):
     """Initialize a 30-step interpolation sampler with no cached state."""
     BaseSampler.__init__(self, gan, samples_per_row)
     self.z = self.y = self.x = None
     self.target = None
     # Walk progress counter and total step count.
     self.step = 0
     self.steps = 30
Example #8
0
 def __init__(self, gan, samples_per_row=8):
     """Split a tall preview image into `frames` vertical segments.

     Fetches the GAN's preview tensor, computes how many frames of height
     `height` it contains, and splits it into per-frame arrays.
     """
     # (removed an unused local alias of gan.session)
     self.x = gan.session.run(gan.preview)
     print("__________", np.shape(self.x),'---oo')
     # NOTE(review): `height` is not a parameter and is not defined in this
     # method; it must come from an enclosing/global scope -- confirm.
     frames = np.shape(self.x)[1]//height
     self.frames=frames
     self.x = np.split(self.x, frames, axis=1)
     # Index of the next frame to emit.
     self.i = 0
     BaseSampler.__init__(self, gan, samples_per_row)
Example #9
0
 def __init__(self, gan, samples_per_row=8, session=None):
     """Initialize a stepped sampler with an empty list of steps."""
     BaseSampler.__init__(self, gan, samples_per_row)
     self.z_start = None
     self.y = self.x = None
     self.target = None
     # Current position and total number of steps to take.
     self.step = 0
     self.steps = []
     self.step_count = 30
Example #10
0
    def __init__(self, gan, samples_per_row=8):
        """Composite sampler: assemble sub-samplers for whichever optional
        components this GAN exposes (progressive-enhancement layers,
        autoencoded x, BEGAN loss, segment generator, frame sequences, and
        named generator layers).
        """
        BaseSampler.__init__(self, gan, samples_per_row)
        x_t = gan.inputs.x
        # NOTE(review): the fetched batch is stored in a module-level global
        # `x` -- presumably shared with other samplers; confirm.
        global x
        x = gan.session.run(x_t)
        self.samplers = [
            #IdentitySampler(gan, gan.inputs.x, samples_per_row),
            #IdentitySampler(gan, gan.inputs.xb, samples_per_row),
            #IdentitySampler(gan, gan.autoencoded_x, samples_per_row),
            #StaticBatchSampler(gan, samples_per_row),
            #BatchSampler(gan, samples_per_row),
            #RandomWalkSampler(gan, samples_per_row)
        ]

        #self.samplers += [IdentitySampler(gan, tf.image.resize_images(gan.inputs.x, [128,128], method=1), samples_per_row)]
        # One identity sampler per progressive-enhancement layer, if any.
        if hasattr(gan.generator, 'pe_layers'):
            self.samplers += [
                IdentitySampler(gan, gx, samples_per_row)
                for gx in gan.generator.pe_layers
            ]
            # NOTE(review): pe_layers is fetched but unused here -- confirm.
            pe_layers = self.gan.skip_connections.get_array(
                "progressive_enhancement")
        #self.samplers +=
        # Real inputs next to their autoencoded reconstructions.
        if hasattr(gan, 'autoencoded_x'):
            self.samplers += [
                IdentitySampler(
                    gan, tf.concat([gan.inputs.x, gan.autoencoded_x], axis=0),
                    samples_per_row)
            ]
        if gan.config.loss['class'] == BoundaryEquilibriumLoss:
            self.samplers += [BeganSampler(gan, samples_per_row)]

        if isinstance(gan.generator, SegmentGenerator):
            self.samplers += [SegmentSampler(gan)]

        # One resized identity sampler per frame of a sequence GAN.
        if hasattr(gan, 'seq'):
            self.samplers += [
                IdentitySampler(
                    gan, tf.image.resize_images(gx, [128, 128], method=1),
                    samples_per_row) for gx in gan.seq
            ]

        # Fallback image used when a named layer is missing.
        default = gan.generator.sample  #tf.zeros_like(gan.generator.layer('gend8x8'))

        def add_samples(layer):
            # Append an identity sampler for the named generator layer,
            # upscaled to 128x128 (falls back to the generator sample).
            layer = gan.generator.layer(layer)
            if layer is None:
                layer = default

            self.samplers.append(
                IdentitySampler(
                    gan, tf.image.resize_images(layer, [128, 128], method=1),
                    1))

        add_samples('gend8x8')
        add_samples('gend16x16')
Example #11
0
 def __init__(self, gan, samples_per_row=8, session=None):
     """Initialize a 30-step sampler with a fixed z and style tensors."""
     BaseSampler.__init__(self, gan, samples_per_row)
     self.z = self.y = self.x = None
     self.target = None
     # Walk progress counter and total steps.
     self.step = 0
     self.steps = 30
     # Fix one z draw from the uniform latent distribution.
     self.z_t = gan.uniform_distribution.sample
     self.z_v = gan.session.run(self.z_t)
     self.styleb_t = gan.styleb.sample
 def __init__(self, gan, samples_per_row=8, session=None):
     """Set up sampler state plus a fixed uniform-z and style-b tensor."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # Empty cached sample state.
     self.z = self.y = self.x = None
     self.target = None
     self.step = 0
     self.steps = 30
     # Tensor handle and one concrete z value from the uniform distribution.
     self.z_t = gan.uniform_distribution.sample
     self.z_v = gan.session.run(self.z_t)
     self.styleb_t = gan.styleb.sample
 def __init__(self, gan, samples_per_row=8, session=None):
     """Stepped sampler over a 2x4 grid; `needed` batches fill the grid."""
     BaseSampler.__init__(self, gan, samples_per_row)
     self.z_start = None
     self.y = self.x = None
     self.target = None
     self.step = 0
     self.steps = []
     self.step_count = 30
     # Grid geometry and how many GAN batches cover it.
     self.rows = 2
     self.columns = 4
     self.needed = int(self.rows * self.columns / gan.batch_size())
 def __init__(self, gan, samples_per_row=8, session=None):
     """Initialize a 2x4 grid sampler with empty step state."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # Cached state, filled in during sampling.
     self.z_start = None
     self.y = self.x = None
     self.target = None
     self.step = 0
     self.steps = []
     self.step_count = 30
     self.rows = 2
     self.columns = 4
     # Number of batches required to cover rows*columns cells.
     self.needed = int(self.rows * self.columns / gan.batch_size())
Example #15
0
 def __init__(self, gan, samples_per_row=8):
     """Build a batch from the first two input images, each repeated over
     half the batch, plus a channel-averaged copy of that batch.
     """
     BaseSampler.__init__(self, gan, samples_per_row)
     # Fixed latent vector, detached/cloned off the training graph.
     self.latent = self.gan.latent.next().data.clone()
     # First half of the batch = copies of x[0]; second half = copies of x[1].
     self.x = torch.cat([
         torch.unsqueeze(self.gan.x[0], 0).repeat(gan.batch_size() // 2, 1,
                                                  1, 1),
         torch.unsqueeze(self.gan.x[1], 0).repeat(gan.batch_size() // 2, 1,
                                                  1, 1)
     ], 0)
     # Mean over dim 1 broadcast back to 3 channels -- grayscale if dim 1
     # is the channel axis (assumed; TODO confirm layout).
     self.bw = self.x.mean(axis=1, keepdims=True).repeat(1, 3, 1,
                                                         1).to(gan.device)
     self.gan = gan
Example #16
0
 def __init__(self, gan, samples_per_row=4, session=None):
     """Walk latent space from latent1 along a random unit direction."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # Two fixed latent draws; pos starts at latent1.
     self.latent1 = self.gan.latent.next()
     self.latent2 = self.gan.latent.next()
     # Per-step speed (15 units over 24 steps -- presumably frames; confirm).
     self.velocity = 15 / 24.0
     direction = self.gan.latent.next()
     self.origin = direction
     self.pos = self.latent1
     # L2-normalize the random direction per row.
     self.direction = direction / torch.norm(
         direction, p=2, dim=1, keepdim=True).expand_as(direction)
     self.hardtanh = nn.Hardtanh()
     self.ones = torch.ones_like(self.direction, device="cuda:0")
     # NOTE(review): this half-zeros/half-ones mask is immediately
     # overwritten by all-ones below, so its construction is dead code --
     # confirm whether the split mask was intended.
     self.mask = torch.cat([
         torch.zeros([1, direction.shape[1] // 2]),
         torch.ones([1, direction.shape[1] // 2])
     ],
                           dim=1).cuda()
     self.mask = torch.ones_like(self.mask).cuda()
Example #17
0
    def __init__(self, gan, samples_per_row=8):
        """Composite sampler: collect sub-samplers for the optional GAN
        components present (pe layers, autoencoded x, BEGAN loss, segment
        generator, sequences, named generator layers).
        """
        BaseSampler.__init__(self, gan, samples_per_row)
        x_t = gan.inputs.x
        # NOTE(review): fetched batch goes into a module-level global `x`;
        # presumably shared elsewhere -- confirm.
        global x
        x = gan.session.run(x_t)
        self.samplers = [
          #IdentitySampler(gan, gan.inputs.x, samples_per_row),
          #IdentitySampler(gan, gan.inputs.xb, samples_per_row),
          #IdentitySampler(gan, gan.autoencoded_x, samples_per_row),
          #StaticBatchSampler(gan, samples_per_row),
          #BatchSampler(gan, samples_per_row),
          #RandomWalkSampler(gan, samples_per_row)
        ]

        #self.samplers += [IdentitySampler(gan, tf.image.resize_images(gan.inputs.x, [128,128], method=1), samples_per_row)]
        # One identity sampler per progressive-enhancement layer, if any.
        if hasattr(gan.generator, 'pe_layers'):
            self.samplers += [IdentitySampler(gan, gx, samples_per_row) for gx in gan.generator.pe_layers]
            # NOTE(review): pe_layers is fetched but never used -- confirm.
            pe_layers = self.gan.skip_connections.get_array("progressive_enhancement")
        #self.samplers += 
        # Real inputs stacked with their autoencoded reconstructions.
        if hasattr(gan, 'autoencoded_x'):
          self.samplers += [IdentitySampler(gan, tf.concat([gan.inputs.x,gan.autoencoded_x], axis=0), samples_per_row)]
        if gan.config.loss['class'] == BoundaryEquilibriumLoss:
          self.samplers += [BeganSampler(gan, samples_per_row)]


        if isinstance(gan.generator, SegmentGenerator):
            self.samplers += [SegmentSampler(gan)]

        # One resized identity sampler per frame of a sequence GAN.
        if hasattr(gan, 'seq'):
            self.samplers += [IdentitySampler(gan, tf.image.resize_images(gx, [128,128], method=1), samples_per_row) for gx in gan.seq]

        # Fallback image used when a named layer is missing.
        default = gan.generator.sample#tf.zeros_like(gan.generator.layer('gend8x8'))
        def add_samples(layer):
            # Append an identity sampler for a named generator layer at
            # 128x128 (falls back to the generator sample when absent).
            layer = gan.generator.layer(layer)
            if layer is None:
                layer = default

            self.samplers.append(IdentitySampler(gan, tf.image.resize_images(layer, [128,128], method=1), 1))

        add_samples('gend8x8')
        add_samples('gend16x16')
Example #18
0
    def __init__(self, gan, samples_per_row=8):
        """Latent-walk sampler over the GAN's current input batch."""
        BaseSampler.__init__(self, gan, samples_per_row)
        # Fixed latent vector, detached/cloned off the training graph.
        self.latent = self.gan.latent.next().data.clone()
        #self.x = torch.unsqueeze(self.gan.x[0],0).repeat(gan.batch_size(),1,1,1)
        self.x = self.gan.x
        # Mean over dim 1 broadcast back to 3 channels -- grayscale if dim 1
        # is the channel axis (assumed; TODO confirm layout).
        self.bw = self.x.mean(axis=1, keepdims=True).repeat(1, 3, 1,
                                                            1).to(gan.device)
        self.gan = gan

        # Two fixed latent draws; pos starts at latent1.
        self.latent1 = self.gan.latent.next()
        self.latent2 = self.gan.latent.next()
        # Per-step speed (15 units over 24 steps -- presumably frames; confirm).
        self.velocity = 15 / 24.0
        direction = self.gan.latent.next()
        self.origin = direction
        self.pos = self.latent1
        # L2-normalize the random walk direction per row.
        self.direction = direction / torch.norm(
            direction, p=2, dim=1, keepdim=True).expand_as(direction)
        self.hardtanh = nn.Hardtanh()
        self.ones = torch.ones_like(self.direction, device="cuda:0")
        # Frame counter for the x-walk and its total length.
        self.xstep = 0
        self.xstep_count = 1200
    def __init__(self, gan, samples_per_row=8):
        """Collect one IdentitySampler per named generator layer at 256x256."""
        BaseSampler.__init__(self, gan, samples_per_row)
        x_t = gan.inputs.x
        # NOTE: the fetched input batch is kept in a module-level global.
        global x
        x = gan.session.run(x_t)

        self.samplers = []
        # Fallback tensor when a named layer does not exist.
        default = tf.zeros_like(gan.generator.sample)
        for layer_name in ('g8x8', 'g16x16', 'g32x32', 'g64x64',
                           'g128x128', 'g256x256'):
            layer = gan.generator.layer(layer_name)
            if layer is None:
                layer = default
            resized = tf.image.resize_images(layer, [256, 256], method=1)
            self.samplers.append(IdentitySampler(gan, resized, 1))
Example #20
0
 def __init__(self, gan, samples_per_row=8):
     """Video sampler: snapshot frame tensors and the video generator's
     recurrent z/c state so frames can be rolled forward.
     """
     BaseSampler.__init__(self, gan, samples_per_row)
     self.z = None
     self.y = None
     self.x = None
     self.mask = None
     # Walk progress counter and total steps.
     self.step = 0
     self.steps = 8
     self.target = None
     # Fixed class/label draw.
     self.y_t = gan.y.sample
     self.y = gan.session.run(self.y_t)
     self.g=tf.get_default_graph()
     # Concrete frame values plus the tensors that produced them.
     self.frames = gan.session.run(gan.frames)
     self.frames_t = gan.frames
     # Snapshot of the last z/c in the gan's z/c chains.
     self.zs2, self.cs2 = gan.session.run([gan.zs[-1], gan.cs[-1]])
     self.zs2 = [self.zs2]
     self.cs2 = [self.cs2]
     # Tensor handles and values for the video generator's recurrent state.
     self.zs_t = [gan.video_generator_last_z]
     self.cs_t = [gan.video_generator_last_c]
     self.zs = gan.session.run([gan.video_generator_last_z])
     self.cs = gan.session.run([gan.video_generator_last_c])
     # Frame index counter.
     self.i=0
Example #21
0
 def __init__(self, gan, samples_per_row=8):
     """Video sampler: capture current frames and the video generator's
     last z/c recurrent state for later rollout.
     """
     BaseSampler.__init__(self, gan, samples_per_row)
     self.z = None
     self.y = None
     self.x = None
     self.mask = None
     # Walk progress counter and total steps.
     self.step = 0
     self.steps = 8
     self.target = None
     # Fixed class/label draw.
     self.y_t = gan.y.sample
     self.y = gan.session.run(self.y_t)
     self.g=tf.get_default_graph()
     # Concrete frame values plus their source tensors.
     self.frames = gan.session.run(gan.frames)
     self.frames_t = gan.frames
     # Snapshot of the last entries of the z/c chains.
     self.zs2, self.cs2 = gan.session.run([gan.zs[-1], gan.cs[-1]])
     self.zs2 = [self.zs2]
     self.cs2 = [self.cs2]
     # Tensors and values for the video generator's recurrent state.
     self.zs_t = [gan.video_generator_last_z]
     self.cs_t = [gan.video_generator_last_c]
     self.zs = gan.session.run([gan.video_generator_last_z])
     self.cs = gan.session.run([gan.video_generator_last_c])
     # Frame index counter.
     self.i=0
Example #22
0
 def __init__(self, gan, node, samples_per_row=8, x=None, z=None):
     """Sampler bound to a specific graph node.

     NOTE: the x/z parameters are accepted but not used here.
     """
     # Bind the node before base initialization (preserves original order).
     self.node = node
     BaseSampler.__init__(self, gan, samples_per_row)
     self.z = self.x = None
Example #23
0
 def __init__(self, gan, samples_per_row=8):
     """Delegate entirely to BaseSampler; no extra state."""
     BaseSampler.__init__(self, gan, samples_per_row)
Example #24
0
 def __init__(self, gan, node, samples_per_row=8):
     """Sampler tied to one graph node."""
     # Store the node first, keeping the original initialization order.
     self.node = node
     BaseSampler.__init__(self, gan, samples_per_row)
Example #25
0
 def __init__(self, gan):
     """Sampler that accumulates up to 3 samples."""
     BaseSampler.__init__(self, gan)
     # Accumulated sample batches (lazily created) and how many to keep.
     self.xs = None
     self.samples = 3
Example #26
0
 def __init__(self, gan):
     """Masked sampler with lazily-created x/z values."""
     BaseSampler.__init__(self, gan)
     # Concrete values and mask tensor, filled in on first use.
     self.x_v = self.z_v = None
     self.mask_t = None
     self.created = False
Example #27
0
 def __init__(self, gan, samples_per_row=8):
     """Sampler tracking the discriminator's real score alongside samples."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # All cached state starts empty.
     self.z = self.y = self.x = None
     self.d_real = None
Example #28
0
 def __init__(self, gan):
     """Sampler that collects 10 samples and displays 5 of them."""
     BaseSampler.__init__(self, gan)
     self.xs = None
     # Collect `samples` batches; show `display_count` of them.
     self.samples = 10
     self.display_count = 5
Example #29
0
    def __init__(self, gan, samples_per_row=8):
        """Composite debug sampler: build sub-samplers for every optional
        component this GAN exposes -- noise generator, autoencoded x, BEGAN
        loss, segment generator, sequences, rolling-memory train hooks, and
        match-support discriminator layers.
        """
        BaseSampler.__init__(self, gan, samples_per_row)
        x_t = gan.inputs.x
        # NOTE(review): fetched batch is stored in a module-level global `x`
        # -- presumably shared with other samplers; confirm.
        global x
        x = gan.session.run(x_t)
        self.samplers = [
            #IdentitySampler(gan, gan.inputs.x, samples_per_row),
            #IdentitySampler(gan, gan.inputs.xb, samples_per_row),
            #IdentitySampler(gan, gan.autoencoded_x, samples_per_row),
            #StaticBatchSampler(gan, samples_per_row),
            #BatchSampler(gan, samples_per_row),
            #RandomWalkSampler(gan, samples_per_row)
        ]

        #self.samplers += [IdentitySampler(gan, tf.image.resize_images(gan.inputs.x, [128,128], method=1), samples_per_row)]
        #if hasattr(gan.generator, 'pe_layers'):
        #    self.samplers += [IdentitySampler(gan, gx, samples_per_row) for gx in gan.generator.pe_layers]
        #    pe_layers = self.gan.skip_connections.get_array("progressive_enhancement")
        # Noise, generator output, and their sum, stacked along the batch.
        if hasattr(gan, 'noise_generator'):
            self.samplers += [
                IdentitySampler(
                    gan,
                    tf.concat([
                        gan.noise_generator.sample, gan.generator.sample,
                        gan.noise_generator.sample + gan.generator.sample
                    ],
                              axis=0), samples_per_row)
            ]
        #self.samplers +=
        # Real inputs next to their autoencoded reconstructions.
        if hasattr(gan, 'autoencoded_x'):
            self.samplers += [
                IdentitySampler(
                    gan, tf.concat([gan.inputs.x, gan.autoencoded_x], axis=0),
                    samples_per_row)
            ]
        if gan.config.loss['class'] == BoundaryEquilibriumLoss:
            self.samplers += [BeganSampler(gan, samples_per_row)]

        if isinstance(gan.generator, SegmentGenerator):
            self.samplers += [SegmentSampler(gan)]

        # One resized identity sampler per frame of a sequence GAN.
        if hasattr(gan, 'seq'):
            self.samplers += [
                IdentitySampler(
                    gan, tf.image.resize_images(gx, [128, 128], method=1),
                    samples_per_row) for gx in gan.seq
            ]

        # Debug views for rolling-memory train hooks. In a distributed
        # (cross-replica) context use the hooks' distributed debug tensors;
        # otherwise sample their memory variables directly.
        for train_hook in self.gan.train_hooks():
            if isinstance(train_hook, RollingMemoryTrainHook):
                if "is_cross_replica_context" in dir(
                        tf.distribute
                ) and tf.distribute.is_cross_replica_context():
                    for debug in train_hook.distributed_debug():
                        self.samplers += [
                            IdentitySampler(gan, debug, samples_per_row)
                        ]
                else:
                    self.samplers += [
                        IdentitySampler(gan, train_hook.mx, samples_per_row)
                    ]
                    self.samplers += [
                        IdentitySampler(gan, train_hook.mg, samples_per_row)
                    ]
            if isinstance(train_hook, RollingMemory2TrainHook):
                if "is_cross_replica_context" in dir(
                        tf.distribute
                ) and tf.distribute.is_cross_replica_context():
                    for debug in train_hook.distributed_debug():
                        self.samplers += [
                            IdentitySampler(gan, debug, samples_per_row)
                        ]
                else:
                    for v in train_hook.variables():
                        self.samplers += [
                            IdentitySampler(gan, v, samples_per_row)
                        ]

        # Fallback image used when a named layer is missing.
        default = gan.generator.sample  #tf.zeros_like(gan.generator.layer('gend8x8'))

        def add_samples(layer):
            # Append an identity sampler for a named generator layer at
            # 128x128 (falls back to the generator sample when absent).
            # NOTE: currently unused -- all call sites below are commented out.
            layer = gan.generator.layer(layer)
            if layer is None:
                layer = default

            self.samplers.append(
                IdentitySampler(
                    gan, tf.image.resize_images(layer, [128, 128], method=1),
                    1))

        #add_samples('gend8x8')
        #add_samples('gend16x16')
        #add_samples('gend32x32')
        #add_samples('gend64x64')
        #add_samples('gend128x128')
        # Match-support debug views: real x / generator sample next to the
        # discriminator's named match-support layers, upscaled to 128x128.
        if hasattr(gan.discriminator, 'named_layers'
                   ) and "match_support_mx" in gan.discriminator.named_layers:
            self.samplers.append(
                IdentitySampler(
                    gan,
                    tf.concat([
                        gan.inputs.x,
                        tf.image.resize_images(
                            gan.discriminator.named_layers['match_support_mx'],
                            [128, 128],
                            method=1),
                        tf.image.resize_images(
                            gan.discriminator.
                            named_layers['match_support_m+x'], [128, 128],
                            method=1)
                    ],
                              axis=0), 1))
            self.samplers.append(
                IdentitySampler(
                    gan,
                    tf.concat([
                        gan.generator.sample,
                        tf.image.resize_images(
                            gan.discriminator.named_layers['match_support_mg'],
                            [128, 128],
                            method=1),
                        tf.image.resize_images(
                            gan.discriminator.
                            named_layers['match_support_m+g'], [128, 128],
                            method=1)
                    ],
                              axis=0), 1))
Example #30
0
 def __init__(self, gan, samples_per_row=8):
     """Single-sample variant of the accumulating sampler."""
     BaseSampler.__init__(self, gan, samples_per_row=samples_per_row)
     # Accumulated batches (lazy) and how many samples to take.
     self.xs = None
     self.samples = 1
 def __init__(self, gan):
     """Accumulate 10 samples, displaying 5."""
     BaseSampler.__init__(self, gan)
     # Lazily-built sample store plus collection/display counts.
     self.xs = None
     self.samples = 10
     self.display_count = 5
Example #32
0
 def __init__(self, gan):
     """Accumulate up to 3 sample batches."""
     BaseSampler.__init__(self, gan)
     # No samples collected yet.
     self.xs = None
     self.samples = 3
 def __init__(self, gan):
     """Paired-input sampler (xa/xb) with lazily-created values."""
     BaseSampler.__init__(self, gan)
     # Concrete values for the two input streams, filled on first sample.
     self.xa_v = self.xb_v = None
     self.created = False
Example #34
0
    def __init__(self, gan, samples_per_row=8):
        """Sampler with a step index; state is set before base init runs."""
        # Initialize state ahead of the base constructor, preserving the
        # original ordering.
        self.i = 0
        self.z = None

        BaseSampler.__init__(self, gan, samples_per_row)
Example #35
0
 def __init__(self, gan, samples_per_row):
     """Sampler with lazily-created x/z values."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # Concrete x/z values, produced on first sample.
     self.x_v = self.z_v = None
     self.created = False
 def __init__(self, gan, samples_per_row=8):
     """Initialize with no cached z/y/x state."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # All sample state starts empty.
     self.z = self.y = self.x = None
Example #37
0
 def __init__(self, gan, samples_per_row=8):
     """Fix one input batch and one latent draw for repeated sampling."""
     BaseSampler.__init__(self, gan, samples_per_row)
     # Draw a fixed input batch and latent vector up front.
     self.inputs = self.gan.inputs.next()
     self.z = self.gan.latent.next()
Example #38
0
 def __init__(self, gan, samples_per_row=8):
     """Delegate entirely to BaseSampler; no extra state."""
     BaseSampler.__init__(self, gan, samples_per_row)
Example #39
0
 def __init__(self, gan):
     """Paired-input (xa/xb) sampler with lazy creation."""
     BaseSampler.__init__(self, gan)
     # Values for the two input streams, created on first use.
     self.xa_v = self.xb_v = None
     self.created = False
Example #40
0
 def __init__(self, gan, samples_per_row):
     """Lazy x/z sampler; values are created on first sample call."""
     BaseSampler.__init__(self, gan, samples_per_row)
     self.x_v = self.z_v = None
     # Flag flipped once the sample graph/values exist.
     self.created = False
Example #41
0
 def __init__(self, gan, node, samples_per_row=8, x=None, z=None):
     """Node-bound sampler.

     NOTE: the x/z parameters are accepted but unused in this initializer.
     """
     # Keep the node assignment ahead of base init (original ordering).
     self.node = node
     BaseSampler.__init__(self, gan, samples_per_row)
     self.z = self.x = None
 def __init__(self, gan, samples_per_row=8):
     BaseSampler.__init__(self, gan, samples_per_row)
     self.latent = self.gan.latent.next().data.clone()