Example #1
    def nn_init(self, layer, initializer_option):
        """Apply the configured weight initializer to `layer`'s weight tensor in place."""
        if initializer_option is None:
            return
        if isinstance(initializer_option, pyparsing.ParseResults) and isinstance(
                initializer_option[0], hypergan.parser.Pattern):
            args = [initializer_option[0].layer_name] + initializer_option[0].args
            options = hc.Config(initializer_option[0].options)
        else:
            args = [initializer_option]
            options = hc.Config({})

        layer_data = layer.weight.data

        if args[0] == "uniform":
            a = float(args[1])
            b = float(args[2])
            nn.init.uniform_(layer_data, a, b)
        elif args[0] == "normal":
            mean = float(args[1])
            std = float(args[2])
            nn.init.normal_(layer_data, mean, std)
        elif args[0] == "constant":
            val = float(args[1])
            nn.init.constant_(layer_data, val)
        elif args[0] == "ones":
            nn.init.ones_(layer_data)
        elif args[0] == "zeros":
            nn.init.zeros_(layer_data)
        elif args[0] == "eye":
            nn.init.eye_(layer_data)
        elif args[0] == "dirac":
            nn.init.dirac_(layer_data)
        elif args[0] == "xavier_uniform":
            gain = nn.init.calculate_gain(options.gain or "relu")
            nn.init.xavier_uniform_(layer_data, gain=gain)
        elif args[0] == "xavier_normal":
            gain = nn.init.calculate_gain(options.gain or "relu")
            nn.init.xavier_normal_(layer_data, gain=gain)
        elif args[0] == "kaiming_uniform":
            a = 0  #TODO wrong
            nn.init.kaiming_uniform_(layer_data,
                                     mode=(options.mode or "fan_in"),
                                     nonlinearity=options.gain or "relu")
        elif args[0] == "kaiming_normal":
            a = 0  #TODO wrong
            nn.init.kaiming_normal_(layer_data,
                                    mode=(options.mode or "fan_in"),
                                    nonlinearity=options.gain or "relu")
        elif args[0] == "orthogonal":
            if "gain" in options:
                gain = nn.init.calculate_gain(options["gain"])
            else:
                gain = 1
            nn.init.orthogonal_(layer_data, gain=gain)
        else:
            print("Warning: No initializer found for " + args[0])
        if "gain" in options:
            layer_data.mul_(nn.init.calculate_gain(options["gain"]))
        return NoOp()
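For orientation, each branch above dispatches to a standard torch.nn.init function. A minimal standalone sketch of the same calls outside HyperGAN, using an illustrative nn.Linear layer:

import torch.nn as nn

fc = nn.Linear(128, 64)  # illustrative layer, not part of HyperGAN

# "xavier_uniform" branch with options.gain == "relu"
gain = nn.init.calculate_gain("relu")
nn.init.xavier_uniform_(fc.weight.data, gain=gain)

# "normal" branch with args == ["normal", "0.0", "0.02"]
nn.init.normal_(fc.weight.data, 0.0, 0.02)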
Example #2
    def layer_split(self, net, args, options):
        """Track the output shape of a channel-dim split; `select` picks which chunk."""
        options = hc.Config(options)
        split_size = args[0]
        select = args[1]
        dim = -1
        if options.dim:
            dim = options.dim
        #TODO better validation
        #TODO increase dim options
        if dim == -1:
            dims = list(self.current_size.dims)
            dims[0] = split_size
            # The last chunk is smaller when channels is not a multiple of split_size.
            if (select + 1) * split_size > self.current_size.channels:
                dims[0] = self.current_size.channels % split_size
            self.current_size = LayerShape(*dims)
        return NoOp()
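The shape bookkeeping above matches how torch.split sizes its chunks: each chunk has split_size channels except possibly the last, which gets channels % split_size. A small sketch under that assumption (the tensor and sizes are illustrative; the actual split is performed elsewhere in HyperGAN):

import torch

x = torch.randn(4, 10, 8, 8)         # batch, channels=10, height, width
split_size, select = 4, 2
chunks = torch.split(x, split_size, dim=1)
print([c.shape[1] for c in chunks])  # [4, 4, 2] -- last chunk is 10 % 4
print(chunks[select].shape)          # torch.Size([4, 2, 8, 8])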
Example #3
    def layer_latent(self, net, args, options):
        self.current_size = LayerShape(self.gan.latent.current_input_size)
        self.is_latent = True
        return NoOp()
Example #4
    def layer_identity(self, net, args, options):
        return NoOp()