示例#1
0
    def load_pathnet(filename):
        """Rebuild a PathNet (shared layers + task heads) from a pickled log.

        Args:
            filename: path to a pickle file written by the saving counterpart;
                expected keys include 'layer_logs', 'task_logs', 'in_shape',
                'width', 'depth', 'training_counter',
                'max_modules_pr_layer', 'min_modules_pr_layer'.

        Returns:
            The reconstructed PathNet instance with layers, tasks and
            training counters restored.

        Raises:
            ValueError: if a layer log carries an unrecognized 'layer_type'.
        """
        # SECURITY NOTE(review): pickle.load can execute arbitrary code when
        # fed an untrusted file — only load logs from trusted sources.
        with open(filename, 'rb') as f:
            log = pickle.load(f)

        layers = []
        for layer_log in log['layer_logs']:
            if layer_log['layer_type'] == 'dense':
                layers.append(DenseLayer.build_from_log(layer_log))
            elif layer_log['layer_type'] == 'conv':
                layers.append(ConvLayer.build_from_log(layer_log))
            else:
                # Silently skipping an unknown type would misalign the
                # zip(layers, log['layer_logs']) below — fail loudly instead.
                raise ValueError(
                    'Unknown layer_type: ' + str(layer_log['layer_type']))

        Layer.initialize_whole_network(layers, log['in_shape'])
        for layer, layer_log in zip(layers, log['layer_logs']):
            layer.load_layer_log(layer_log)

        pathnet = PathNet(input_shape=log['in_shape'],
                          width=log['width'],
                          depth=log['depth'])
        pathnet._layers = layers
        pathnet.training_counter = log['training_counter']
        pathnet.max_modules_pr_layer = log['max_modules_pr_layer']
        pathnet.min_modules_pr_layer = log['min_modules_pr_layer']

        tasks = []
        for task_log in log['task_logs']:
            task = TaskContainer.build_from_log(task_log)
            # Build a model for the task first so task.layer exists before
            # its saved weights are restored.
            pathnet.path2model(pathnet.random_path(), task)
            task.layer.set_weights(task_log['layer_weights'])
            tasks.append(task)

        pathnet._tasks = tasks

        return pathnet
示例#2
0
    def mnist(output_size=10):
        """Build a conv PathNet plus a TaskContainer configured for MNIST.

        Args:
            output_size: number of output classes for the task head
                (default 10, the full MNIST label set).

        Returns:
            A (pathnet, task) tuple: the initialized PathNet and the single
            'unique_mnist' TaskContainer registered on it.
        """
        conv_config = [{
            'channels': 1,
            'kernel': (3, 3),
            'stride': (1, 1),
            'activation': 'relu'
        }]
        config = [{'out': 20, 'activation': 'relu'}]
        input_shape = [28, 28, 1]
        depth = 3
        width = 10
        max_modules_pr_layer = 3
        min_modules_pr_layer = 1
        learning_rate = 0.0001
        optimizer_type = Adam
        loss = 'categorical_crossentropy'
        # Flatten inside the task-unique head rather than in the shared layers.
        flatten_in_unique = True

        layers = []
        layers.append(ConvLayer(width, 'L0', conv_config))
        layers.append(ConvLayer(width, 'L1', conv_config))
        # Only the last shared layer downsamples via max-pooling.
        layers.append(ConvLayer(width, 'L2', conv_config, maxpool=True))

        Layer.initialize_whole_network(layers, input_shape)

        task = TaskContainer(input_shape,
                             output_size,
                             flatten_in_unique,
                             name='unique_mnist',
                             optimizer=optimizer_type,
                             loss=loss,
                             lr=learning_rate)

        pathnet = PathNet(input_shape=input_shape,
                          width=width,
                          depth=depth,
                          max_active_modules=20)
        pathnet._layers = layers
        pathnet._tasks = [task]
        pathnet.max_modules_pr_layer = max_modules_pr_layer
        pathnet.min_modules_pr_layer = min_modules_pr_layer

        # Snapshot the freshly initialized weights so modules can later be
        # reset to their starting state.
        for layer in pathnet._layers:
            layer.save_initialized_weights()

        return pathnet, task
示例#3
0
    def cifar10():
        """Build a conv PathNet plus a TaskContainer configured for CIFAR-10.

        Returns:
            A (pathnet, task) tuple: the initialized PathNet and the single
            'unique_cifar10' TaskContainer registered on it.
        """
        conv_config = [{
            'channels': 3,
            'kernel': (3, 3),
            'stride': (1, 1),
            'activation': 'relu'
        }]
        dense_config = [{'out': 20, 'activation': 'relu'}]
        input_shape = [32, 32, 3]
        output_size = 10
        depth = 3
        width = 10
        max_modules_pr_layer = 3
        learning_rate = 0.001
        optimizer_type = Adam
        loss = 'categorical_crossentropy'

        # All shared layers are convolutional; only the final one max-pools.
        layers = [
            ConvLayer(width, 'L0', conv_config),
            ConvLayer(width, 'L1', conv_config),
            ConvLayer(width, 'L2', conv_config, maxpool=True),
        ]

        Layer.initialize_whole_network(layers, input_shape)

        task = TaskContainer(input_shape,
                             output_size,
                             True,
                             name='unique_cifar10',
                             optimizer=optimizer_type,
                             loss=loss,
                             lr=learning_rate)

        pathnet = PathNet(input_shape=input_shape, width=width, depth=depth)
        pathnet._layers = layers
        pathnet._tasks = [task]
        pathnet.max_modules_pr_layer = max_modules_pr_layer

        # Snapshot the freshly initialized weights for later module resets.
        for shared_layer in pathnet._layers:
            shared_layer.save_initialized_weights()

        return pathnet, task
示例#4
0
    def reset_backend_session(self):
        """Clear the Keras backend session and rebuild every layer and task.

        Weights are stashed before the session is cleared and restored
        afterwards, so the network's learned state survives the reset.
        """
        shared_layers = self._layers
        all_tasks = self._tasks

        # Stash weights before the session (and its variables) is destroyed.
        for lyr in shared_layers:
            lyr.save_layer_weights()
        for tsk in all_tasks:
            tsk.save_layer_weights()

        K.clear_session()

        # Recreate each layer in the fresh session, then rewire the whole
        # network before any weights are restored.
        for lyr in shared_layers:
            lyr._init_layer()
        Layer.initialize_whole_network(shared_layers, self.input_shape)

        for lyr in shared_layers:
            lyr.load_layer_weights()
        for tsk in all_tasks:
            tsk.load_layer_weights()

        # The fresh session now holds one shared model plus one per task.
        self._models_created_in_current_session = 1 + len(self._tasks)
示例#5
0
    def binary_mnist():
        """Build a dense PathNet plus a TaskContainer for binary MNIST.

        Returns:
            A (pathnet, task) tuple: the initialized PathNet and the single
            'unique_binary_mnist' TaskContainer registered on it.
        """
        config = [{'out': 20, 'activation': 'relu'}]
        input_shape = [28, 28, 1]
        output_size = 2
        depth = 3
        width = 10
        max_modules_pr_layer = 3
        learning_rate = 0.0001
        optimizer_type = SGD
        loss = 'binary_crossentropy'

        layers = []
        for l in range(depth):
            # Branch on the loop index, not on len(layers): only the first
            # layer flattens the image input.
            if l == 0:
                layers.append(DenseLayer(width, 'L0', config, flatten=True))
            else:
                layers.append(DenseLayer(width, 'L' + str(l), config))

        Layer.initialize_whole_network(layers, input_shape)

        task = TaskContainer(input_shape,
                             output_size,
                             name='unique_binary_mnist',
                             optimizer=optimizer_type,
                             loss=loss,
                             lr=learning_rate)

        pathnet = PathNet(input_shape=input_shape, width=width, depth=depth)
        pathnet._layers = layers
        pathnet._tasks = [task]
        pathnet.max_modules_pr_layer = max_modules_pr_layer

        # Snapshot the freshly initialized weights for later module resets.
        for layer in pathnet._layers:
            layer.save_initialized_weights()

        return pathnet, task