def __init__(self, gan, config, name=None, input=None, reuse=None, x=None, g=None, features=None, skip_connections=None):
    """Initialize this generator component by delegating to both base classes.

    Args:
        gan: the owning GAN object (project type).
        config: component configuration.
        name: optional variable-scope / component name.
        input: optional input tensor for the component.
        reuse: optional TF variable-reuse flag.
        x, g: optional real/generated sample tensors forwarded to the bases.
        features: optional list of feature tensors (defaults to a fresh empty list).
        skip_connections: optional list of skip-connection tensors (accepted for
            interface compatibility; not forwarded here).
    """
    # Bug fix: the defaults were `features=[]` / `skip_connections=[]`.
    # Mutable default arguments are evaluated once and shared across every
    # instantiation, so one instance appending to `features` would leak into
    # all later instances. Use None sentinels and create fresh lists instead.
    features = [] if features is None else features
    skip_connections = [] if skip_connections is None else skip_connections
    # Explicit base-class calls (not super()): both bases are initialized
    # deliberately, with `features` only going to ConfigurableComponent.
    ConfigurableComponent.__init__(self, gan, config, name=name, input=input, features=features, reuse=reuse, x=x, g=g)
    BaseGenerator.__init__(self, gan, config, name=name, input=input, reuse=reuse, x=x, g=g)
def __init__(self, gan, config, name=None, input=None, reuse=None, features=None, skip_connections=None):
    """Initialize this discriminator component by delegating to both base classes.

    Args:
        gan: the owning GAN object (project type).
        config: component configuration.
        name: optional variable-scope / component name.
        input: optional input tensor for the component.
        reuse: optional TF variable-reuse flag.
        features: optional list of feature tensors (defaults to a fresh empty list).
        skip_connections: optional list of skip-connection tensors (accepted for
            interface compatibility; not forwarded here).
    """
    # Bug fix: the defaults were `features=[]` / `skip_connections=[]`.
    # A mutable default is a single shared list across every call; mutating it
    # from one instance would silently affect all others. Use None sentinels.
    features = [] if features is None else features
    skip_connections = [] if skip_connections is None else skip_connections
    # Explicit base-class calls (not super()): both bases are initialized
    # deliberately with the same keyword set.
    ConfigurableComponent.__init__(self, gan, config, name=name, input=input, features=features, reuse=reuse)
    BaseDiscriminator.__init__(self, gan, config, name=name, input=input, features=features, reuse=reuse)
def layer_filter(self, net, args=[], options={}):
    # Apply the configured `layer_filter` to `net`, per-batch-slice.
    # NOTE(review): `args=[]` and `options={}` are mutable default arguments —
    # safe only as long as nothing mutates them; confirm and prefer None sentinels.
    config = self.config
    gan = self.gan
    ops = self.ops  # NOTE(review): bound but unused in this method.
    concats = []
    if 'layer_filter' in config and config.layer_filter is not None:
        print("[discriminator] applying layer filter", config['layer_filter'])
        filters = []
        # `net` is assumed to hold `stacks` groups of `gan.batch_size()` rows
        # stacked along axis 0 — TODO confirm against callers.
        stacks = self.ops.shape(net)[0] // gan.batch_size()
        for stack in range(stacks):
            # Slice out one batch-sized group (all of the remaining 3 dims)
            # and run the parent's layer_filter on it independently.
            piece = tf.slice(net, [stack * gan.batch_size(), 0, 0, 0], [gan.batch_size(), -1, -1, -1])
            filters.append( ConfigurableComponent.layer_filter(self, piece, args, options))
        # Re-stack the filtered pieces along the batch axis.
        layer = tf.concat(axis=0, values=filters)
        concats.append(layer)
    # NOTE(review): `concats` can contain at most one element (a single append
    # above), so this branch never executes and the filtered `layer` is
    # discarded — `net` is returned unchanged. Possibly `concats` was meant to
    # start as `[net]` so the filter output concatenates onto the channel
    # axis (axis=3). Confirm intended behavior before "fixing".
    if len(concats) > 1:
        net = tf.concat(axis=3, values=concats)
    return net
def __init__(self, gan, config, *args, **kw_args):
    """Record the latent 'z' size as the initial input width, then defer
    all remaining setup to ConfigurableComponent.

    Args:
        gan: the owning GAN object; its config must provide latent["z"].
        config: component configuration, forwarded unchanged.
        *args, **kw_args: passed straight through to the base initializer.
    """
    latent_config = gan.config.latent
    self.current_input_size = latent_config["z"]
    ConfigurableComponent.__init__(self, gan, config, *args, **kw_args)
def __init__(self, gan, config, *args, **kw_args):
    # Pure pass-through constructor: this subclass adds no state of its own.
    # The explicit base call (rather than super()) matches the convention used
    # by the sibling components in this file.
    ConfigurableComponent.__init__(self, gan, config, *args, **kw_args)
def __init__(self, gan, config, name=None, input=None, reuse=None, features=None, skip_connections=None):
    """Initialize this discriminator component by delegating to both base classes.

    Args:
        gan: the owning GAN object (project type).
        config: component configuration.
        name: optional variable-scope / component name.
        input: optional input tensor for the component.
        reuse: optional TF variable-reuse flag.
        features: optional list of feature tensors (defaults to a fresh empty list).
        skip_connections: optional list of skip-connection tensors (accepted for
            interface compatibility; not forwarded here).
    """
    # Bug fix: the defaults were `features=[]` / `skip_connections=[]`.
    # Mutable defaults are shared across every instantiation of the class;
    # replace with None sentinels that create a fresh list per call.
    features = [] if features is None else features
    skip_connections = [] if skip_connections is None else skip_connections
    # Explicit base-class calls (not super()): both bases are initialized
    # deliberately with the same keyword set.
    ConfigurableComponent.__init__(self, gan, config, name=name, input=input, features=features, reuse=reuse)
    BaseDiscriminator.__init__(self, gan, config, name=name, input=input, features=features, reuse=reuse)