Example no. 1
    def to_descriptor(self, dimensions=2):

        self.active_nodes = 0
        descriptor = NeuralDescriptor()

        actives = set()
        self.actives = set()
        # Get only active nodes
        for p in self.get_direct_paths():
            for n in p:
                actives.add(n)

        # First add the nodes themselves
        for g in self.nodes.genes:
            gene = self.nodes.genes[g]
            # Don't add inactive nodes
            if gene.innovation_number in actives and gene.enabled:
                if not gene.io:
                    self.active_nodes += 1
                    self.actives.add(str(gene.value))
                    # Get the node's name (innovation number)
                    innv = str(gene.innovation_number)

                    # Get the parameters
                    parameters = gene.value

                    filter_no = int(parameters[0])
                    dropout_rate = float(parameters[1])
                    weight_scale = float(parameters[2])
                    kernel_size = int(parameters[3])

                    max_pool = int(parameters[4]) == 1
                    out_channels = filter_no

                    # --Define the layers and parameters--

                    # Convolution layer
                    conv_layer = nn.Conv2d
                    if dimensions == 1:
                        conv_layer = nn.Conv1d
                    conv_parameters = {
                        'in_channels': 1000,
                        'out_channels': out_channels,
                        'kernel_size': kernel_size
                    }

                    descriptor.add_layer(conv_layer,
                                         conv_parameters,
                                         name=innv + 'in')

                    # Scale the weights
                    descriptor.add_layer_sequential(ScaleLayer,
                                                    {'scale': weight_scale},
                                                    name=innv + 'scale')

                    # Dropout layer
                    if dimensions == 2:
                        dout = nn.Dropout2d
                    else:
                        dout = nn.Dropout
                    dout_parameters = {'p': dropout_rate}
                    descriptor.add_layer_sequential(dout,
                                                    dout_parameters,
                                                    name=innv + 'dout')

                    # Max pool layer
                    if max_pool:
                        pool = nn.MaxPool2d
                        if dimensions == 1:
                            pool = nn.MaxPool1d
                        pool_parameters = {
                            'kernel_size': kernel_size,
                            'stride': kernel_size
                        }
                        descriptor.add_layer_sequential(pool,
                                                        pool_parameters,
                                                        name=innv + 'pool')

                    # Activation layer
                    descriptor.add_layer_sequential(nn.ReLU6, {},
                                                    name=innv + 'out')

        # Add IO layers
        descriptor.add_layer(Identity, {}, name='-2out')
        descriptor.add_layer(Identity, {}, name='-1in')
        descriptor.first_layer = '-2out'
        descriptor.last_layer = '-1in'

        # Connect the layers
        for g in self.connections.genes:
            gene = self.connections.genes[g]
            from_, to_ = gene.value
            # Connect all active
            if gene.enabled:
                # Only connections from/to active nodes should be added
                if from_ in actives and to_ in actives:

                    last_out = str(from_) + 'out'
                    descriptor.connect_layers(last_out, str(to_) + 'in')

        return descriptor
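
For context, the method above treats each node gene's value as a five-element parameter vector. The decoding below is purely illustrative, with made-up values, and simply mirrors the indexing used in the code:

# Hypothetical gene value: [filters, dropout, weight scale, kernel size, max-pool flag]
example_value = [32, 0.25, 0.1, 3, 1]
filter_no = int(example_value[0])       # 32 output channels
dropout_rate = float(example_value[1])  # 25% dropout
weight_scale = float(example_value[2])  # factor passed to ScaleLayer
kernel_size = int(example_value[3])     # kernel of size 3
max_pool = int(example_value[4]) == 1   # pooling enabled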
Example no. 2
import torch.nn as nn
import torch.optim as opt

# NeuralDescriptor is imported as in the other examples; LocalEvaluator is
# assumed to live in the same module. Configs, used below, comes from the
# example's own project code.
from nord.neural_nets import LocalEvaluator, NeuralDescriptor

# Instantiate the evaluator:
# pass the optimizer CLASS and
# any parameters you wish to use
# with the optimizer

evaluator = LocalEvaluator(optimizer_class=opt.Adam,
                           optimizer_params={}, verbose=True)

# Select dataset from:
# - cifar10 (requires 2d conv and pool, i.e. conv = nn.Conv2d, pool = nn.MaxPool2d)
# - fashion-mnist (requires 2d conv and pool, i.e. conv = nn.Conv2d, pool = nn.MaxPool2d)
# - activity_recognition (requires 1d conv and pool, i.e. conv = nn.Conv1d, pool = nn.MaxPool1d)
dataset = 'activity_recognition'
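
# Illustrative only (not part of the original snippet): the conv/pool classes
# must match the dataset's dimensionality, as noted above. A small lookup like
# this yields the same classes that are assigned explicitly below.
layer_classes = {
    'cifar10': (nn.Conv2d, nn.MaxPool2d),
    'fashion-mnist': (nn.Conv2d, nn.MaxPool2d),
    'activity_recognition': (nn.Conv1d, nn.MaxPool1d),
}
conv, pool = layer_classes[dataset]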

# Instantiate a descriptor
d = NeuralDescriptor()
conf = Configs()

# Define layer presets
conv = nn.Conv1d
# Note that the first layer's input channels must
# equal the dataset's channel count
conv_params = {'in_channels': conf.CHANNELS[dataset],
               'out_channels': 5, 'kernel_size': 3}

conv_2_params = {'in_channels': 5,
                 'out_channels': 10, 'kernel_size': 3}

pool = nn.MaxPool1d
pool_params = {'kernel_size': 2, 'stride': 2}
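
The snippet ends at the presets; a minimal sketch of how they would typically be wired into the descriptor, following the sequential pattern of the NeuralNet example further below (the ordering is illustrative):

# Illustrative continuation (not part of the original snippet)
d.add_layer_sequential(conv, conv_params)
d.add_layer_sequential(conv, conv_2_params)
d.add_layer_sequential(pool, pool_params)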
Example no. 3
"""
Example usage of BenchmarkEvaluator,
which evaluates the given architecture in NASBench-101

"""
from nord.neural_nets import BenchmarkEvaluator, NeuralDescriptor

# Instantiate the evaluator
evaluator = BenchmarkEvaluator()
# Instantiate a descriptor
d = NeuralDescriptor()

# See the available layers (ops)
# for NASBench-101
layers = evaluator.get_available_ops()
print(layers)
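# NASBench-101 typically exposes three ops: 'conv3x3-bn-relu',
# 'conv1x1-bn-relu' and 'maxpool3x3'; the exact list and its order
# come from the call above.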

# Add NASBench-101 Layers connected
# sequentially
d.add_layer('input', None, 'in')
d.add_layer_sequential(layers[0], None, 'layer_1')
d.add_layer_sequential(layers[2], None, 'layer_2')
d.add_layer_sequential('output', None, 'out')

# Add a skip connection from layer_1 straight to the output
d.connect_layers('layer_1', 'out')

# Get the validation accuracy and training time
val_acc, train_time = evaluator.descriptor_evaluate(d,
                                                    acc='validation_accuracy')
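
A trivial, purely illustrative follow-up that prints the two returned values:

print('Validation accuracy:', val_acc)
print('Training time:', train_time)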
Example no. 4
    def to_descriptor(self, dimensions=2):

        assert dimensions <= 2

        self.active_nodes = 0
        descriptor = NeuralDescriptor()

        actives = set()
        self.actives = set()
        # Get only active nodes
        for p in self.get_direct_paths():
            for n in p:
                actives.add(n)

        # First add the nodes themselves
        for g in self.nodes.genes:
            gene = self.nodes.genes[g]
            # Don't add inactive nodes
            if gene.innovation_number in actives and gene.enabled:
                if not gene.io:
                    self.active_nodes += 1
                    self.actives.add(str(gene.value))
                    # Get the node's name (innovation number)
                    innv = str(gene.innovation_number)

                    # Get the parameters
                    selected_layer, params = layers_list[gene.value[0]], None
                    if '.' in selected_layer:
                        selected_layer, params = selected_layer.split('.')

                    layer, kernel = selected_layer.split('_')
                    channels = self.channels
                    kernel = int(kernel)
                    conv = False
                    if layer == 'CONV':
                        conv = True
                        if params == 'H':
                            channels = int(channels/2)
                        layer = nn.Conv2d
                        parameters = {'in_channels': 1000,
                                      'out_channels': channels,
                                      'kernel_size': kernel,
                                      'stride': self.strides}
                        if kernel == 151:
                            layer = Conv2d151
                            parameters = {'in_channels': 1000,
                                          'out_channels': channels}

                    elif layer == 'POOL':
                        layer = nn.MaxPool2d
                        if params == 'A':
                            layer = nn.AvgPool2d
                        parameters = {'kernel_size': kernel,
                                      'stride': kernel}

                    descriptor.add_layer(layer, parameters, name=innv+'in')
                    # Activation layer
                    if conv:
                        descriptor.add_layer_sequential(
                            nn.ReLU6, {}, name=innv+'relu')
                        descriptor.add_layer_sequential(
                            nn.BatchNorm2d, {'num_features': channels},
                            name=innv+'batchnorm')
                        descriptor.add_layer_sequential(
                            nn.Dropout, {'p': DROPOUT_PROB}, name=innv+'dropout')

                    descriptor.add_layer_sequential(
                        Identity, {}, name=innv+'out')

        # Add IO layers
        descriptor.add_layer(Identity, {}, name=INPUT_NAME)
        descriptor.add_layer(Identity, {}, name=OUTPUT_NAME)
        descriptor.first_layer = INPUT_NAME
        descriptor.last_layer = OUTPUT_NAME

        # Connect the layers
        for g in self.connections.genes:
            gene = self.connections.genes[g]
            from_, to_ = gene.value
            # Connect all active
            if gene.enabled:
                # Only connections from/to active nodes should be added
                if from_ in actives and to_ in actives:

                    last_out = str(from_)+'out'
                    to_layer = str(to_)+'in'

                    if from_ == INPUT:
                        last_out = INPUT_NAME
                    elif from_ == OUTPUT:
                        last_out = OUTPUT_NAME

                    if to_ == INPUT:
                        to_layer = INPUT_NAME
                    elif to_ == OUTPUT:
                        to_layer = OUTPUT_NAME

                    descriptor.connect_layers(last_out, to_layer)

        return descriptor
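
To make the encoding that the method parses explicit: each layers_list entry is assumed to follow the 'LAYER_kernel' pattern, optionally suffixed with '.H' (half the channels, convolutions only) or '.A' (average instead of max pooling). The list below is hypothetical:

# Hypothetical layer encodings, matching the parsing logic above
layers_list = [
    'CONV_3',     # 3x3 convolution, full channel count
    'CONV_5.H',   # 5x5 convolution with half the channels
    'CONV_151',   # kernel code 151 selects the custom Conv2d151 layer
    'POOL_2',     # 2x2 max pooling
    'POOL_2.A',   # 2x2 average pooling
]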
Example no. 5
    def to_descriptor(self, modules_list, dimensions=2):

        assert dimensions == 2

        self.active_nodes = 0
        descriptor = NeuralDescriptor()

        actives = set()
        self.actives = set()
        # Get only active nodes
        for p in self.get_direct_paths():
            for n in p:
                actives.add(n)

        # First add the nodes themselves
        for g in self.nodes.genes:
            gene = self.nodes.genes[g]
            # Don't add inactive nodes
            if gene.innovation_number in actives and gene.enabled:
                if not gene.io:
                    self.active_nodes += 1
                    self.actives.add(str(gene.value))
                    # Get the node's name (innovation number)
                    innv = str(gene.innovation_number)

                    # Get the parameters
                    selected_module = gene.value
                    module = modules_list[selected_module]
                    module_descriptor = module.to_descriptor()
                    module_descriptor.add_suffix('_' + innv)
                    descriptor.layers.update(module_descriptor.layers)
                    descriptor.incoming_connections.update(
                        module_descriptor.incoming_connections)
                    descriptor.connections.update(
                        module_descriptor.connections)

        # Add IO layers
        descriptor.add_layer(Identity, {}, name=INPUT_NAME)
        descriptor.add_layer(Identity, {}, name=OUTPUT_NAME)
        descriptor.first_layer = INPUT_NAME
        descriptor.last_layer = OUTPUT_NAME

        # Connect the layers
        for g in self.connections.genes:
            gene = self.connections.genes[g]
            from_, to_ = gene.value
            # Connect all active
            if gene.enabled:
                # Only connections from/to active nodes should be added
                if from_ in actives and to_ in actives:
                    from_name = OUTPUT_NAME + '_' + str(from_)
                    to_name = INPUT_NAME + '_' + str(to_)

                    if from_ == INPUT:
                        from_name = INPUT_NAME
                    elif from_ == OUTPUT:
                        from_name = OUTPUT_NAME

                    if to_ == INPUT:
                        to_name = INPUT_NAME
                    elif to_ == OUTPUT:
                        to_name = OUTPUT_NAME
                    descriptor.connect_layers(from_name, to_name)

        return descriptor
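
A purely illustrative note on the naming scheme that the connection loop relies on: add_suffix('_' + innv) renames each sub-descriptor's endpoints, so they can be rebuilt from the gene's innovation number:

innv = '7'                            # hypothetical innovation number
from_name = OUTPUT_NAME + '_' + innv  # e.g. 'output_7'
to_name = INPUT_NAME + '_' + innv     # e.g. 'input_7'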
Example no. 6
import torch.nn as nn

from nord.neural_nets import NeuralDescriptor, NeuralNet

# Define layer presets
conv = nn.Conv2d
conv_params = {'in_channels': 3, 'out_channels': 5, 'kernel_size': 3}

conv_2_params = {'in_channels': 5, 'out_channels': 10, 'kernel_size': 5}

pool = nn.MaxPool2d
pool_params = {'kernel_size': 2, 'stride': 2}

pool2_params = {'kernel_size': 2, 'stride': 5}

# Sequential Example
# Instantiate a descriptor
d = NeuralDescriptor()

# Add layers sequentially
d.add_layer_sequential(conv, conv_params)
d.add_layer_sequential(conv, conv_2_params)
d.add_layer_sequential(pool, pool_params)
d.add_layer_sequential(pool, pool2_params)

# Instantiate the network
net = NeuralNet(net_descriptor=d,
                num_classes=10,
                input_shape=(32, 32),
                input_channels=3)
# Print it
print(net)
# Plot it