class SwitchNodeGene(DefaultNodeGene):
    """Node gene extended with a boolean 'is_switch' attribute marking
    switch neurons; a switch/non-switch mismatch dominates the distance."""

    _gene_attributes = [
        FloatAttribute('bias'),
        StringAttribute('activation', options='sigmoid'),
        StringAttribute('aggregation', options='sum'),
        BoolAttribute('is_switch')
    ]

    def distance(self, other, config):
        """Return the compatibility distance between two switch node genes,
        scaled by ``config.compatibility_weight_coefficient``."""
        # Bug fix: a distance must use the *difference* of the biases;
        # abs(a + b) is not zero for identical genes.
        d = abs(self.bias - other.bias)
        if self.activation != other.activation:
            d += 1.0
        if self.aggregation != other.aggregation:
            d += 1.0
        # Bug fix: accumulate the heavy switch-mismatch penalty instead of
        # overwriting everything measured so far (`d = 3` discarded the
        # bias/activation/aggregation contributions).
        if self.is_switch != other.is_switch:
            d += 3.0
        return d * config.compatibility_weight_coefficient
Example #2
class FormNodeGene(BaseGene):
    """Node gene describing a set-operation form: 'union' or 'intersection'.

    Forms are considered fully compatible, so the genomic distance between
    any two form genes is always zero.
    """

    __gene_attributes__ = [
        StringAttribute('type',
                        default='union',
                        options=['union', 'intersection'])
    ]

    def distance(self, other, config):
        """Return 0.0: form genes never contribute to genome distance."""
        return 0.0
Example #3
class DefaultNodeGene(BaseGene):
    """Standard node gene: bias, response, activation and aggregation."""

    _gene_attributes = [
        FloatAttribute('bias'),
        FloatAttribute('response'),
        StringAttribute('activation', options='sigmoid'),
        StringAttribute('aggregation', options='sum'),
    ]

    def __init__(self, key):
        assert isinstance(key, int), "DefaultNodeGene key must be an int, not {!r}".format(key)
        BaseGene.__init__(self, key)

    def distance(self, other, config):
        # The distance between two nodes is the absolute difference of
        # their bias and response values, plus a unit penalty for each
        # mismatched activation or aggregation function.
        delta = abs(self.bias - other.bias)
        delta += abs(self.response - other.response)
        delta += 0.0 if self.activation == other.activation else 1.0
        delta += 0.0 if self.aggregation == other.aggregation else 1.0
        return delta * config.compatibility_weight_coefficient
Example #4
class DefaultNodeGene(BaseGene):
    """Node gene holding bias, response, activation and aggregation."""

    __gene_attributes__ = [
        FloatAttribute('bias'),
        FloatAttribute('response'),
        StringAttribute('activation'),
        StringAttribute('aggregation'),
    ]

    @classmethod
    def parse_config(cls, config, param_dict):
        """Build the gene-level configuration from this gene's attributes."""
        return DefaultGeneConfig(cls.__gene_attributes__, param_dict)

    def distance(self, other, config):
        """Compatibility distance: |bias gap| + |response gap| plus 1.0 per
        mismatched functional attribute, scaled by the coefficient."""
        d = abs(self.bias - other.bias) + abs(self.response - other.response)
        for attr in ('activation', 'aggregation'):
            if getattr(self, attr) != getattr(other, attr):
                d += 1.0
        return d * config.compatibility_weight_coefficient
Example #5
File: genes.py  Project: oldoc/neat-cnn
class DefaultNodeGene(BaseGene):
    """Node gene with bias/response/activation/aggregation plus a
    convolution kernel attribute and an explicit layer index."""

    _gene_attributes = [
        FloatAttribute('bias'),
        FloatAttribute('response'),
        StringAttribute('activation', options='sigmoid'),
        StringAttribute('aggregation', options='sum'),
        ListAttribute('kernal'),
    ]

    def __init__(self, key, layer):
        assert isinstance(key, int), "DefaultNodeGene key must be an int, not {!r}".format(key)
        BaseGene.__init__(self, key)
        # Added by Andrew: remember which network layer this node belongs to.
        self.layer = layer

    def distance(self, other, config):
        """Compatibility distance between two node genes.

        NOTE(review): the 'kernal' attribute does not contribute to the
        distance here — confirm that is intentional.
        """
        total = abs(self.bias - other.bias)
        total += abs(self.response - other.response)
        if self.activation != other.activation:
            total += 1.0
        if self.aggregation != other.aggregation:
            total += 1.0
        return total * config.compatibility_weight_coefficient
Example #6
class MapNodeGene(DefaultNodeGene):
    """Node gene distinguishing map neurons from isolated neurons."""

    _gene_attributes = [
        FloatAttribute('bias'),                            # the neuron's bias
        StringAttribute('activation', options='sigmoid'),  # activation, tunable from config
        StringAttribute('aggregation', options='sum'),     # aggregation function
        BoolAttribute('is_isolated'),                      # map vs isolated neuron
    ]

    def distance(self, other, config):
        """Distance ignores bias: one unit per mismatched activation,
        aggregation, or map/isolated flag, scaled by the coefficient."""
        mismatches = 0
        if self.activation != other.activation:
            mismatches += 1.0
        if self.aggregation != other.aggregation:
            mismatches += 1.0
        if self.is_isolated != other.is_isolated:
            mismatches += 1
        return mismatches * config.compatibility_weight_coefficient
Example #7
class CircuitConnectionGene(BaseGene):
    """Connection gene for a circuit: component type, value, enabled flag."""

    __gene_attributes__ = [
        StringAttribute('component'),
        FloatAttribute('value'),
        BoolAttribute('enabled'),
    ]

    def distance(self, other, config):
        """Absolute value gap plus unit penalties for a differing component
        or enabled state, scaled by the weight coefficient."""
        gap = abs(self.value - other.value)
        if self.component != other.component:
            gap += 1.0
        if self.enabled != other.enabled:
            gap += 1.0
        return gap * config.compatibility_weight_coefficient
Example #8
class GRUNodeGene(BaseGene):
    """Node gene for a GRU-style neuron whose read/forget gates reference
    the keys of other nodes in the genome."""

    _gene_attributes = [
        FloatAttribute('bias'),
        FloatAttribute('response'),
        StringAttribute('activation', options='tanh'),
        StringAttribute('aggregation', options='sum'),
        DynamicAttribute('read_gate', options=[]),
        DynamicAttribute('forget_gate')
    ]
    # Positions of the gate attributes inside _gene_attributes.
    _read_gate_idx = 4
    _forget_gate_idx = 5

    def __init__(self, key):
        assert isinstance(
            key, int), "GRUNodeGene key must be an int, not {!r}".format(key)
        BaseGene.__init__(self, key)

    # TODO: Gates should influence the distance
    def distance(self, other, config):
        """Distance over bias/response/activation/aggregation only; the
        gate attributes are not yet counted (see TODO above)."""
        diff = abs(self.bias - other.bias)
        diff += abs(self.response - other.response)
        if self.activation != other.activation:
            diff += 1.0
        if self.aggregation != other.aggregation:
            diff += 1.0
        return diff * config.compatibility_weight_coefficient

    def mutate(self, *args, **kwargs):
        # Intentionally a no-op: the gate options list must be refreshed
        # before mutating, so ordinary mutation is disabled here.
        pass

    def mutate_safe(self, *args, **kwargs):
        """Run the base-class mutation (call once gate options are valid)."""
        BaseGene.mutate(self, *args, **kwargs)

    def inform_deleted(self, key, config):
        """Re-initialize any gate that referenced the deleted node *key*."""
        if self.read_gate == key:
            gate_attr = self._gene_attributes[self._read_gate_idx]
            self.read_gate = gate_attr.init_value(config)
        if self.forget_gate == key:
            gate_attr = self._gene_attributes[self._forget_gate_idx]
            self.forget_gate = gate_attr.init_value(config)
Example #9
class DefaultNodeGene(BaseGene):
    """Typed node gene with bias, response, activation and aggregation."""

    _gene_attributes: List[BaseAttribute] = [
        FloatAttribute("bias"),
        FloatAttribute("response"),
        StringAttribute("activation", options="sigmoid"),
        StringAttribute("aggregation", options="sum"),
    ]

    def __init__(self, key: int):
        assert isinstance(
            key,
            int), "DefaultNodeGene key must be an int, not {!r}".format(key)
        BaseGene.__init__(self, key)

    def distance(self, other: DefaultNodeGene,
                 config: DefaultGenomeConfig) -> float:
        # The node distance is the absolute difference of the bias and
        # response values, plus a penalty for each mismatched activation
        # or aggregation function.
        result = abs(self.bias - other.bias)
        result += abs(self.response - other.response)
        result += 1.0 if self.activation != other.activation else 0.0
        result += 1.0 if self.aggregation != other.aggregation else 0.0
        return result * config.compatibility_weight_coefficient
Example #10
class StateGene(BaseGene):
    """Class representing the gene of a state in the state machine."""

    _gene_attributes = [
        BiasesAttribute('biases'),
        WeightsAttribute('weights'),
        StringAttribute('activation'),
        StringAttribute('aggregation')
    ]

    def __init__(self, key):
        assert isinstance(
            key, int), "StateGene key must be an int, not {!r}".format(key)
        BaseGene.__init__(self, key)

    def distance(self, other, config):
        """Return the compatibility distance between two state genes.

        The distance is the mean absolute difference of the bias vectors
        plus the mean absolute difference of the weight matrices, scaled
        by ``config.compatibility_difference_coefficient``.

        NOTE(review): activation/aggregation mismatches are not counted
        here (unchanged from the original) — confirm that is intended.
        """
        assert isinstance(other, StateGene)

        # np.mean over the whole array replaces the slower Python-level
        # sum()/size computation (nested sum() for the 2-D weights); the
        # result is numerically equivalent.
        avg_bias_difference = np.mean(np.abs(self.biases - other.biases))
        avg_weight_difference = np.mean(np.abs(self.weights - other.weights))

        return config.compatibility_difference_coefficient * (
            avg_bias_difference + avg_weight_difference)

    def copy(self):
        """Return a copy of this gene; the bias/weight arrays are duplicated
        so mutating the copy cannot affect the original."""
        state = StateGene(self.key)
        state.biases = np.array(self.biases)
        state.weights = np.array(self.weights)
        state.activation = self.activation
        state.aggregation = self.aggregation

        return state
Example #11
class DHNNodeGene(BaseGene):
    """Node gene for DHN networks, carrying a CPPN mapping tuple in
    addition to the standard bias/response/activation/aggregation."""

    _gene_attributes = [
        FloatAttribute('bias'),
        FloatAttribute('response'),
        StringAttribute(
            'activation',
            options=['sigmoid', 'dhngauss', 'dhngauss2', 'linear', 'tanh']),
        StringAttribute('aggregation', options='sum')
    ]

    def __init__(self, key, cppn_tuple=((), ())):
        # cppn_tuple presumably holds (input keys, output keys) this node
        # maps between — TODO confirm against the caller.
        self.cppn_tuple = cppn_tuple
        # Bug fix: the assertion message previously named "DefaultNodeGene"
        # (copy-paste), which made failures misleading for this class.
        assert isinstance(
            key,
            int), "DHNNodeGene key must be an int, not {!r}".format(key)
        BaseGene.__init__(self, key)

    def distance(self, other, config):
        """Distance: |bias gap| + |response gap| plus 1.0 per mismatched
        activation or aggregation, scaled by the weight coefficient."""
        d = abs(self.bias - other.bias) + abs(self.response - other.response)
        if self.activation != other.activation:
            d += 1.0
        if self.aggregation != other.aggregation:
            d += 1.0
        return d * config.compatibility_weight_coefficient
Example #12
class CircuitConnectionGene(BaseGene):
    """Connection gene for a circuit: component type, value, enabled flag."""

    __gene_attributes__ = [
        StringAttribute('component'),
        FloatAttribute('value'),
        BoolAttribute('enabled')
    ]

    @classmethod
    def parse_config(cls, config, param_dict):
        """Build the gene-level configuration from this gene's attributes."""
        return DefaultGeneConfig(cls.__gene_attributes__, param_dict)

    def distance(self, other, config):
        """Absolute value gap plus unit penalties for a differing component
        or enabled state, scaled by the weight coefficient."""
        total = abs(self.value - other.value)
        for attr in ('component', 'enabled'):
            if getattr(self, attr) != getattr(other, attr):
                total += 1.0
        return total * config.compatibility_weight_coefficient
Example #13
class TensorGene(BaseGene):
    """Gene for a tensor-network node.

    For now the weights do not mutate; aggregation is handled inside the
    tensor neural_net class.
    """
    _gene_attributes = [
        FloatAttribute('bias'),
        StringAttribute('activation', options='sigmoid')
    ]

    def __init__(self, key):
        # Bug fix: the assertion message previously named "DefaultNodeGene"
        # (copy-paste), which made failures misleading for this class.
        assert isinstance(
            key,
            int), "TensorGene key must be an int, not {!r}".format(key)
        BaseGene.__init__(self, key)
        # Weights are populated externally and never mutated (see class doc).
        self.weights = []

    def distance(self, other, config):
        """Distance: |bias gap| plus 1.0 for a differing activation,
        scaled by the weight coefficient. (Removed the stale commented-out
        response term: this gene has no 'response' attribute.)"""
        d = abs(self.bias - other.bias)
        if self.activation != other.activation:
            d += 1.0
        return d * config.compatibility_weight_coefficient

    @classmethod
    def get_config_params(cls):
        """Collect config parameters from every gene attribute, migrating a
        legacy '__gene_attributes__' class attribute with a deprecation
        warning if '_gene_attributes' is missing."""
        params = []
        if not hasattr(cls, '_gene_attributes'):
            setattr(cls, '_gene_attributes', getattr(cls,
                                                     '__gene_attributes__'))
            warnings.warn(
                "Class '{!s}' {!r} needs '_gene_attributes' not '__gene_attributes__'"
                .format(cls.__name__, cls), DeprecationWarning)
        for a in cls._gene_attributes:
            if hasattr(a, 'get_config_params'):
                params += a.get_config_params()
        return params

    def init_attributes(self, config):
        """Initialize every named gene attribute from the config defaults."""
        for a in self._gene_attributes:
            if hasattr(a, 'name'):
                setattr(self, a.name, a.init_value(config))
Example #14
class DefaultNodeGene(BaseGene):
    """Node gene describing a CNN layer (conv2d or dense) and its
    hyperparameters, all encoded as string-valued attributes."""

    _gene_attributes = [
        StringAttribute('type_of_layer', options='conv2d dense'),
        StringAttribute('num_of_nodes', options='512 1024 2048'),
        StringAttribute('activation', options='relu sigmoid'),
        StringAttribute('num_filters_conv', options='32 64 96'),
        StringAttribute('kernel_size_conv', options='1 3 5 7'),
        StringAttribute('stride_conv', options='1 2'),
        StringAttribute('stride_pool', options='1 2'),
        StringAttribute('poolsize_pool', options='2 3'),
        StringAttribute('has_maxpool', options='true false')
    ]

    def __init__(self, key):
        assert isinstance(
            key,
            int), "DefaultNodeGene key must be an int, not {!r}".format(key)
        BaseGene.__init__(self, key)

    def distance(self, other, config):
        """Weighted compatibility distance between two layer genes: each
        mismatched hyperparameter adds its factor; a dense-layer size gap is
        scaled continuously. Result is multiplied by the coefficient."""

        # Relative importance of each hyperparameter mismatch.
        factors = {
            "type_of_layer": 10,
            "num_of_nodes": 8,
            "activation": 1,
            "num_filters_conv": 1,
            "kernel_size_conv": 2,
            "stride_conv": 1.5,
            "stride_pool": 1.5,
            "poolsize_pool": 2
        }

        d = 0.0
        if self.type_of_layer != other.type_of_layer:
            d += factors["type_of_layer"]
        elif self.type_of_layer == "dense":
            # Normalize the node-count gap by the option range (2048 - 512).
            d += (abs(float(self.num_of_nodes) - float(other.num_of_nodes)) /
                  1536) * factors["num_of_nodes"]
        else:
            if self.num_filters_conv != other.num_filters_conv:
                d += factors["num_filters_conv"]

            if self.kernel_size_conv != other.kernel_size_conv:
                d += factors["kernel_size_conv"]

            if self.stride_conv != other.stride_conv:
                d += factors["stride_conv"]

            # Pooling parameters only matter if either layer uses max-pool.
            if self.has_maxpool == "true" or other.has_maxpool == "true":
                if self.stride_pool != other.stride_pool:
                    d += factors["stride_pool"]

                # Bug fix: this second check duplicated the stride_pool
                # comparison (double-counting its factor) while the
                # "poolsize_pool" factor was defined but never used; it was
                # clearly meant to compare the pool sizes.
                if self.poolsize_pool != other.poolsize_pool:
                    d += factors["poolsize_pool"]

        if self.activation != other.activation:
            d += factors["activation"]

        return d * config.compatibility_weight_coefficient