Example #1
0
def main():
    """ Run NASBOT on a synthetic CNN objective and visualise the best network.

    Relies on module-level constants (MAX_NUM_LAYERS, MIN_MASS, ...) and helpers
    (get_nn_domain_from_constraints, SyntheticWorkerManager, FunctionCaller,
    cnn_syn_func1, nasbot, visualise_nn) imported elsewhere in the file.
    """
    # Obtain the search space of CNN architectures from the size/degree constraints.
    nn_domain = get_nn_domain_from_constraints('cnn', MAX_NUM_LAYERS,
                                               MIN_NUM_LAYERS, MAX_MASS,
                                               MIN_MASS, MAX_IN_DEGREE,
                                               MAX_OUT_DEGREE, MAX_NUM_EDGES,
                                               MAX_NUM_UNITS_PER_LAYER,
                                               MIN_NUM_UNITS_PER_LAYER)
    # Obtain a worker manager: A worker manager (defined in opt/worker_manager.py) is used
    # to manage (possibly) multiple workers. For a synthetic experiment, we will use a
    # synthetic worker manager with 1 worker.
    worker_manager = SyntheticWorkerManager(1)
    # Obtain a function caller: A function_caller is used to evaluate a function defined on
    # neural network architectures. Here, we have obtained a function_caller from a
    # synthetic function, but for real experiments, you might have to write your own caller.
    # See the MLP/CNN demos for an example.
    func_caller = FunctionCaller(cnn_syn_func1, nn_domain)
    # Finally, specify the budget. In this case, it will be just the number of evaluations.
    budget = 20

    # Run nasbot
    opt_val, opt_nn, _ = nasbot.nasbot(func_caller, worker_manager, budget)

    # Print the optimal value and visualise the best network.
    # NOTE: converted from Python-2 print statements so the snippet also runs on Python 3.
    print('\nOptimum value found: ', opt_val)
    print('Optimal network visualised in syn_opt_network.eps.')
    visualise_nn(opt_nn, 'syn_opt_network')
Example #2
0
def main():
    """ Run NASBOT on a real CIFAR-10 CNN objective and visualise the best network.

    Relies on module-level constants (GPU_IDS, EXP_DIR, DATA_DIR, TMP_DIR, BUDGET,
    REPORTER, MAX_NUM_LAYERS, ...) and helpers (get_nn_domain_from_constraints,
    RealWorkerManager, CNNFunctionCaller, nasbot, visualise_nn) imported elsewhere.
    """
    # Obtain the search space of CNN architectures from the size/degree constraints.
    nn_domain = get_nn_domain_from_constraints('cnn', MAX_NUM_LAYERS,
                                               MIN_NUM_LAYERS, MAX_MASS,
                                               MIN_MASS, MAX_IN_DEGREE,
                                               MAX_OUT_DEGREE, MAX_NUM_EDGES,
                                               MAX_NUM_UNITS_PER_LAYER,
                                               MIN_NUM_UNITS_PER_LAYER)
    # Obtain a worker manager: A worker manager (defined in opt/worker_manager.py) is used
    # to manage (possibly) multiple workers. For a RealWorkerManager, the budget should be
    # given in wall clock seconds.
    worker_manager = RealWorkerManager(GPU_IDS, EXP_DIR)
    # Obtain a function caller: A function_caller is used to evaluate a function defined on
    # neural network architectures. We have defined the CNNFunctionCaller in
    # demos/cnn_function_caller.py. The train_params argument can be used to specify
    # additional training parameters such as the learning rate etc.
    train_params = Namespace(data_dir=DATA_DIR)
    func_caller = CNNFunctionCaller('cifar10',
                                    nn_domain,
                                    train_params,
                                    tmp_dir=TMP_DIR,
                                    reporter=REPORTER)

    # Run nasbot
    opt_val, opt_nn, _ = nasbot.nasbot(func_caller,
                                       worker_manager,
                                       BUDGET,
                                       reporter=REPORTER)

    # Print the optimal value and visualise the best network.
    REPORTER.writeln('\nOptimum value found: %f' % opt_val)
    visualise_file = os.path.join(EXP_DIR, 'cnn_opt_network')
    REPORTER.writeln('Optimal network visualised in %s.eps' % (visualise_file))
    # BUG FIX: visualise_file already contains EXP_DIR (joined above); the original
    # `EXP_DIR + visualise_file` prepended the directory twice, producing a bad path.
    visualise_nn(opt_nn, visualise_file)
 def test_call_with_list(self):
     """ Tests the __call__ function of the modifier with a list of input networks. """
     self.report('Testing the __call__ function with a list of inputs.')
     # Number of modified networks requested per problem, and the distribution over
     # how many modification steps each sample takes.
     num_modifications = 40
     num_steps_probs = [0.5, 0.25, 0.125, 0.075, 0.05]
     save_dir = os.path.join(self.save_dir, 'modifier_call_list')
     # Start from a clean output directory so stale visualisations don't linger.
     if os.path.exists(save_dir):
         rmtree(save_dir)
     # Three test problems: the CNN fixtures, the MLP fixtures, and freshly
     # generated classification MLPs.
     test_probs = [
         self.cnns, self.mlps,
         generate_mlp_architectures('class')
     ]
     for idx, prob in enumerate(test_probs):
         save_prefix = str(idx)
         # Modifier without the constraint checker (wo_cc).
         modifier = self.modifier_wo_cc
         modifications = modifier(prob, num_modifications, num_steps_probs)
         # Every returned object must be a NeuralNetwork; visualise each one.
         for new_idx, new_nn in enumerate(modifications):
             assert isinstance(new_nn, NeuralNetwork)
             visualise_nn(
                 new_nn,
                 os.path.join(save_dir, '%s_%d' % (save_prefix, new_idx)))
         self.report(
             'With list of %d nns(%s):: #new-networks: %d.' %
             (len(prob), prob[0].nn_class, len(modifications)),
             'test_result')
def main():
    """ Run NASBOT on a real MLP regression objective and visualise the best network.

    Relies on module-level constants (GPU_IDS, DATASET, TMP_DIR, BUDGET, REPORTER,
    MAX_NUM_LAYERS, ...) and helpers (get_nn_domain_from_constraints,
    RealWorkerManager, MLPFunctionCaller, get_train_file_name, nasbot,
    visualise_nn) imported elsewhere in the file.
    """
    # Obtain the search space of regression-MLP architectures from the constraints.
    nn_domain = get_nn_domain_from_constraints('mlp-reg', MAX_NUM_LAYERS,
                                               MIN_NUM_LAYERS, MAX_MASS,
                                               MIN_MASS, MAX_IN_DEGREE,
                                               MAX_OUT_DEGREE, MAX_NUM_EDGES,
                                               MAX_NUM_UNITS_PER_LAYER,
                                               MIN_NUM_UNITS_PER_LAYER)
    # Obtain a worker manager: A worker manager (defined in opt/worker_manager.py) is used
    # to manage (possibly) multiple workers. For a RealWorkerManager, the budget should be
    # given in wall clock seconds.
    worker_manager = RealWorkerManager(GPU_IDS)
    # Obtain a function caller: A function_caller is used to evaluate a function defined on
    # neural network architectures. We have defined the MLPFunctionCaller in
    # demos/mlp_function_caller.py. The train_params can be used to specify additional
    # training parameters such as the learning rate etc.
    train_params = Namespace(data_train_file=get_train_file_name(DATASET))
    func_caller = MLPFunctionCaller(DATASET,
                                    nn_domain,
                                    train_params,
                                    reporter=REPORTER,
                                    tmp_dir=TMP_DIR)

    # Run nasbot
    opt_val, opt_nn, _ = nasbot.nasbot(func_caller,
                                       worker_manager,
                                       BUDGET,
                                       reporter=REPORTER)

    # Print the optimal value and visualise the best network.
    # BUG FIX: the original format string had no conversion specifier
    # ('... found: ' % opt_val), which raises TypeError at runtime; use %f
    # consistent with the CNN demo.
    REPORTER.writeln('\nOptimum value found: %f' % opt_val)
    REPORTER.writeln('Optimal network visualised in mlp_opt_network.eps.')
    visualise_nn(opt_nn, 'mlp_opt_network')
Example #5
0
 def test_cnn_visualisation(self):
     """ Checks that every CNN fixture can be rendered, in both normal and
     presentation modes. """
     self.report('Testing visualisation of cnns.')
     for net_idx, network in enumerate(self.cnns):
         file_name = 'cnn_%02d' % (net_idx)
         # Render once into the regular output directory ...
         nn_visualise.visualise_nn(network,
                                   os.path.join(self.save_dir, file_name))
         # ... and once into the presentation directory with for_pres=True.
         nn_visualise.visualise_nn(network,
                                   os.path.join(self.save_dir_for_pres,
                                                file_name),
                                   for_pres=True)
def test_for_orig_vs_modifications(save_dir, save_prefix, old_nn,
                                   get_modifications, constraint_checker,
                                   write_result):
    """ Checks the modifications of a network against the original and visualises
    every network involved.
  """
    # Keep a snapshot so we can verify the modifier did not mutate its input.
    pristine_copy = deepcopy(old_nn)
    visualise_nn(old_nn, os.path.join(save_dir, '%s_orig' % (save_prefix)))
    # Produce the modified networks.
    modified_nns = get_modifications(old_nn)
    # Each modification must be a valid NeuralNetwork satisfying the constraints.
    for mod_idx, mod_nn in enumerate(modified_nns):
        assert isinstance(mod_nn, NeuralNetwork)
        assert constraint_checker(mod_nn)
        out_path = os.path.join(save_dir, '%s_%d' % (save_prefix, mod_idx))
        visualise_nn(mod_nn, out_path)
    # The original must be untouched by the modification process.
    assert test_if_two_networks_are_equal(old_nn, pristine_copy)
    write_result(
        '%s (%s):: #new-networks: %d.' %
        (save_prefix, old_nn.nn_class, len(modified_nns)), 'test_result')
Example #7
0
 def test_mlp_visualisation(self):
     """ Checks that the regression and classification MLP fixtures can be
     rendered, in both normal and presentation modes. """
     self.report('Testing visualisation of mlps.')
     for idx in range(len(self.mlps_reg)):
         # At each index: first the regression MLP, then the classification one.
         for prefix, nets in (('mlp_reg', self.mlps_reg),
                              ('mlp_cla', self.mlps_class)):
             file_name = '%s_%02d' % (prefix, idx)
             # Regular rendering, then presentation-mode rendering.
             nn_visualise.visualise_nn(nets[idx],
                                       os.path.join(self.save_dir, file_name))
             nn_visualise.visualise_nn(nets[idx],
                                       os.path.join(self.save_dir_for_pres,
                                                    file_name),
                                       for_pres=True)
Example #8
0
 def test_vgg(self):
   """ Checks that a generated VGG network can be visualised. """
   self.report('Testing the VGG net. ')
   # Build the example VGG architecture and render it under the save directory.
   network = nn_examples.get_vgg_net(3)
   out_prefix = os.path.join(self.save_dir, "vgg-16")
   visualise_nn(network, out_prefix)