Example #1
0
File: mnist.py  Project: tgey/DeepSwarm
# Load the MNIST handwritten-digit dataset
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Normalize pixel values to [0, 1] and add a single channel axis (28x28x1)
x_train, x_test = x_train / 255.0, x_test / 255.0
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
# Create dataset object, which controls all the data
normalized_dataset = Dataset(
    training_examples=x_train,
    training_labels=y_train,
    testing_examples=x_test,
    testing_labels=y_test,
    validation_split=0.1,
)
# Create backend responsible for training & validating
backend = TFKerasBackend(dataset=normalized_dataset)
# Create DeepSwarm object responsible for optimization
deepswarm = DeepSwarm(backend=backend)
# Find the topology for a given dataset. Catch only Exception (a bare
# `except:` would also trap KeyboardInterrupt/SystemExit), log the failure,
# and re-raise: without the re-raise, `topology` would be undefined below
# and the script would die with a confusing NameError instead.
try:
    topology = deepswarm.find_topology()
except Exception:
    Log.error(f'{sys.exc_info()} occurred')
    Log.error(f'{traceback.format_exc()}')
    raise

# Evaluate discovered topology
deepswarm.evaluate_topology(topology)
# Train topology for additional 30 epochs
trained_topology = deepswarm.train_topology(topology, 30)
# Evaluate the final topology
deepswarm.evaluate_topology(trained_topology)
Example #2
0
# Load CIFAR-10 dataset
cifar10 = tf.keras.datasets.cifar10
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Convert class vectors to binary class matrices
y_train = tf.keras.utils.to_categorical(y_train, 10)
y_test = tf.keras.utils.to_categorical(y_test, 10)
# Create dataset object, which controls all the data
dataset = Dataset(
    training_examples=x_train,
    training_labels=y_train,
    testing_examples=x_test,
    testing_labels=y_test,
    validation_split=0.1,
)
# Create backend responsible for training & validating
backend = TFKerasBackend(dataset=dataset)
# Create DeepSwarm object responsible for optimization
deepswarm = DeepSwarm(backend=backend)
# Find the topology for a given dataset
topology = deepswarm.find_topology()
# Evaluate discovered topology
deepswarm.evaluate_topology(topology)
# Train topology on augmented data for additional 50 epochs
trained_topology = deepswarm.train_topology(topology,
                                            50,
                                            augment={
                                                'rotation_range': 15,
                                                'width_shift_range': 0.1,
                                                'height_shift_range': 0.1,
                                                'horizontal_flip': True,
                                            })
Example #3
0
    def aco_train(self, ui):
        """Run DeepSwarm's ant-colony optimization on the dataset chosen in ``ui``.

        First rewrites the selected dataset's YAML settings file with the
        user-chosen parameters (``ui.m_depth``, ``ui.n_ant``, ``ui.epochs``),
        and only THEN imports deepswarm — the library reads its settings once
        at import time, so importing earlier would ignore the customization.
        Results are exposed through the module-level globals listed below.
        """
        # function which uses aco to train on the inputted dataset
        global x_train, y_train, x_test, y_test, trained_topology

        def _apply_ui_settings(path):
            """Rewrite the YAML settings file at `path` with the UI parameters."""
            yaml = YAML()  # round-trip loader preserves the file's layout
            # `with` closes the handles (the original `open(...).read()` leaked them)
            with open(path) as settings_file:
                config = yaml.load(settings_file.read())
            config['DeepSwarm']['max_depth'] = ui.m_depth
            config['DeepSwarm']['aco']['ant_count'] = ui.n_ant
            config['DeepSwarm']['backend']['epochs'] = ui.epochs
            with open(path, "w") as settings_file:
                yaml.dump(config, settings_file)

        # Both branches previously duplicated the same YAML edit verbatim;
        # only the file path differs, so dispatch on it.
        if ui.dataset == 'cifar10':
            _apply_ui_settings("settings/cifar10.yaml")
        elif ui.dataset == 'fashion-mnist':
            _apply_ui_settings("settings/fashion-mnist.yaml")

        # important to leave deepswarm import at this point otherwise
        # user customization won't update the yaml file
        from deepswarm.backends import Dataset, TFKerasBackend

        print("ACO Training of dataset ...")

        if ui.dataset == 'fashion-mnist':
            # Load Fashion-MNIST, normalize to [0, 1], add a channel axis
            (x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()
            x_train, x_test = x_train / 255.0, x_test / 255.0
            x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
            x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)

        elif ui.dataset == 'cifar10':
            # Load CIFAR-10 and one-hot encode the labels (10 classes)
            (x_train, y_train), (x_test, y_test) = cifar10.load_data()
            y_train = tf.keras.utils.to_categorical(y_train, 10)
            y_test = tf.keras.utils.to_categorical(y_test, 10)

        # import is at this point since __init__.py for the deepswarm module runs
        # once on import; the modified yaml settings (above) would not be picked
        # up if deepswarm were imported at the start of the program.
        from deepswarm.deepswarm import DeepSwarm

        # Create dataset object, which controls all the data
        dataset = Dataset(training_examples=x_train,
                          training_labels=y_train,
                          testing_examples=x_test,
                          testing_labels=y_test,
                          validation_split=0.1)

        # Create backend responsible for training & validating
        backend = TFKerasBackend(dataset=dataset)
        # Create DeepSwarm object responsible for optimization
        deepswarm = DeepSwarm(backend=backend)
        # Find the topology for a given dataset
        topology = deepswarm.find_topology()
        # Evaluate discovered topology
        deepswarm.evaluate_topology(topology)

        # The two dataset branches used identical augmentation settings, so the
        # duplicated calls collapse into a single guarded call.
        if ui.dataset in ('cifar10', 'fashion-mnist'):
            # Train topology on augmented data for additional 50 epochs
            trained_topology = deepswarm.train_topology(
                topology,
                50,
                augment={
                    'rotation_range': 15,
                    'width_shift_range': 0.1,
                    'height_shift_range': 0.1,
                    'horizontal_flip': True,
                })

        # Evaluate the final topology
        deepswarm.evaluate_topology(trained_topology)