# Example #1
class LinearAC(LinearOnly):
    """Linear-network generator that optionally interleaves activations.

    Extends ``LinearOnly``: with 70% probability (decided once per
    generator instance) every hidden linear layer is followed by an
    activation unit drawn from ``ac``.
    """

    # Randomized activation choice shared across generated nets.
    ac = ActivationParam()

    def __init__(
        self,
        start_shape,
        stop_shape,
        min_features=10,
        max_features=128,
        depth=2,
        bias_prob=0.0,
    ):
        LinearOnly.__init__(
            self, start_shape, stop_shape, min_features, max_features, depth, bias_prob
        )
        # Single coin flip: do generated nets include activations at all?
        self.use_ac = BinaryParam(name="")
        self.use_ac.randomize(true_prob=0.7)

    def generate(self):
        """Build and return the ordered list of layer units for one net."""
        limits = self.limits  # saved so the width limits can be restored below
        unit_list = []
        for j in range(self.depth):
            # First layer consumes the net input; later layers chain shapes.
            if j == 0:
                in_shape = self.start_shape
            else:
                in_shape = unit_list[-1].out_shape
            # Only the final layer is pinned to the requested output shape.
            if j == self.depth - 1:
                out_shape = self.stop_shape
            else:
                out_shape = None
            unit_list.append(self.layers[0](in_shape, out_shape))
            # Hidden layers (out_shape is None) may be followed by an activation.
            if self.use_ac.value and out_shape is None:
                unit_list.append(
                    self.ac.val(unit_list[-1].out_shape, unit_list[-1].out_shape)
                )
            # Cap the next layer's width at the width just produced.
            self.layers[0].out_features.limits = (limits[0], unit_list[-1].out_shape[0])

        # Restore limits for subsequent generate() calls.
        # NOTE(review): the saved value is ``self.limits`` but the restore
        # targets ``self.layers[0].out_features.limits`` — confirm these are
        # meant to hold the same value (ConvAC.generate saves and restores
        # the very same attribute, which is safer).
        self.layers[0].out_features.limits = limits
        return unit_list

    def __call__(self, num_nets, startnum=1):
        # Derive the net name from the class name plus the chosen activation,
        # mirroring ConvAC.__call__ ("LinearAC" -> "Linear<Activation>").
        # Identical output to the old hard-coded "Linear{}" format for this
        # class, but also correct for subclasses.
        self.name = self.__class__.__name__.replace(
            "AC", self.ac.val.__class__.__name__
        )
        return _Net.__call__(self, num_nets, startnum)
# Example #2
class ConvAC(ConvOnly):
    """Conv-network generator that optionally interleaves activations.

    Extends ``ConvOnly``: with 70% probability (decided once per generator
    instance) every hidden conv layer is followed by an activation unit
    drawn from ``ac``.
    """

    # Randomized activation choice shared across generated nets.
    ac = ActivationParam()

    def __init__(self, start_shape, stop_shape, depth, convclass, bias_prob):
        ConvOnly.__init__(self, start_shape, stop_shape, depth, convclass,
                          bias_prob)
        # Single coin flip: do generated nets include activations at all?
        self.use_ac = BinaryParam(name="")
        self.use_ac.randomize(true_prob=0.7)

    def generate(self, resnet_cover=256):
        """Build and return the ordered list of layer units for one net."""
        ndim = len(self.start_shape) - 1
        # Initial kernel-size limits: at most the smallest spatial extent of
        # the input, capped by resnet_cover.
        cap = min(list(self.start_shape[1:]) + [resnet_cover])
        self.layers[0].kernel_size.randomize(
            limits=((1, ) * ndim, (cap, ) * ndim))
        saved_limits = self.layers[0].kernel_size.limits
        units = []
        for idx in range(self.depth):
            # First layer consumes the net input; later layers chain shapes.
            cur_in = self.start_shape if idx == 0 else units[-1].out_shape
            # Only the final layer is pinned to the requested output shape.
            cur_out = self.stop_shape if idx == self.depth - 1 else None
            units.append(self.layers[0](cur_in, cur_out))
            # Hidden layers (cur_out is None) may be followed by an activation.
            if self.use_ac.value and cur_out is None:
                last_shape = units[-1].out_shape
                units.append(self.ac.val(last_shape, last_shape))
            # Re-limit the kernel so the next layer fits the shape just made.
            shape = units[-1].out_shape
            ndim = len(shape) - 1
            self.layers[0].kernel_size.randomize(
                limits=((1, ) * ndim, (min(shape[1:]), ) * ndim))
        # Restore the original kernel limits for subsequent generate() calls.
        self.layers[0].kernel_size.randomize(limits=saved_limits)
        return units

    def __call__(self, num_nets, startnum=1):
        # Name the net after the class, with "AC" replaced by the chosen
        # activation's class name (e.g. "ConvAC" -> "Conv<Activation>").
        activation_name = self.ac.val.__class__.__name__
        self.name = self.__class__.__name__.replace("AC", activation_name)
        return _Net.__call__(self, num_nets, startnum)