def generate_search_space(num_nodes_per_cell,
                          num_normal_cells,
                          num_reduction_cells,
                          init_filters,
                          stem_multiplier,
                          num_classes=10):
    """Build a NASNet-style image-classifier search space.

    Constructs a stem, a sequence of normal cells with reduction cells
    spread evenly among them, an auxiliary logits head, and a final
    classification head, wiring them into DeepArchitect input/output ports.

    Also (re)initializes the module-level ``global_vars`` and ``hp_sharer``
    as a side effect, registering the shared ``drop_path_keep_prob``
    hyperparameter and exposing the training-progress port under
    ``global_vars['progress']``.

    Args:
        num_nodes_per_cell: number of intermediate nodes inside each cell.
        num_normal_cells: number of normal (stride-1) cells in the network.
        num_reduction_cells: number of reduction cells interleaved among
            the normal cells; each doubles the filter count.
        init_filters: filter count for the first cells; the stem uses
            ``init_filters * stem_multiplier``.
        stem_multiplier: multiplier applied to ``init_filters`` for the stem.
        num_classes: width of the final fully-connected logits layer.
            Defaults to 10, matching the previously hard-coded value.

    Returns:
        A pair ``(inputs, outputs)`` of port dicts: ``'In0'`` feeds the
        image stem, ``'In1'`` feeds the progress signal; ``'Out0'`` is the
        auxiliary logits output, ``'Out1'`` the final logits output.
    """
    global global_vars, hp_sharer
    global_vars = {}
    hp_sharer = hp.HyperparameterSharer()
    hp_sharer.register('drop_path_keep_prob',
                       lambda: D([.7], name='drop_path_keep_prob'))

    stem_in, stem_out = stem(int(init_filters * stem_multiplier))
    progress_in, progress_out = mo.identity()
    global_vars['progress'] = progress_out['Out']

    normal_cell_fn = create_cell_generator(num_nodes_per_cell, False)
    reduction_cell_fn = create_cell_generator(num_nodes_per_cell, True)
    total_cells = num_normal_cells + num_reduction_cells

    # Spread the reduction cells evenly among the normal-cell positions.
    has_reduction = [False] * num_normal_cells
    for i in range(num_reduction_cells):
        has_reduction[int(
            float(i + 1) / (num_reduction_cells + 1) * num_normal_cells)] = True

    # Each cell consumes the outputs of the two preceding cells; both
    # "previous" slots start out pointing at the stem.
    inputs = [stem_out, stem_out]
    filters = init_filters
    # Position of the auxiliary-loss head: the normal cell just before the
    # last reduction point.
    aux_loss_idx = int(
        float(num_reduction_cells) /
        (num_reduction_cells + 1) * num_normal_cells) - 1

    outs = {}
    # Float counter: (cells_created + 1) / total_cells is the per-cell
    # progress fraction used for drop-path scheduling.
    cells_created = 0.0
    for i in range(num_normal_cells):
        if has_reduction[i]:
            filters *= 2
            connect_new_cell(
                reduction_cell_fn(filters, (cells_created + 1) / total_cells),
                inputs)
            cells_created += 1.0
        connect_new_cell(
            normal_cell_fn(filters, (cells_created + 1) / total_cells), inputs)
        cells_created += 1.0
        if i == aux_loss_idx:
            aux_in, aux_out = aux_logits()
            aux_in['In'].connect(inputs[-1]['Out'])
            outs['Out0'] = aux_out['Out']

    # Classification head: relu -> global pool -> (no-op) dropout -> logits.
    _, final_out = mo.siso_sequential([(None, inputs[-1]),
                                       relu(),
                                       global_pool2d(),
                                       dropout(D([1.0])),
                                       fc_layer(D([num_classes]))])
    outs['Out1'] = final_out['Out']
    return {'In0': stem_in['In'], 'In1': progress_in['In']}, outs
import tensorflow as tf from collections import OrderedDict import deep_architect.core as co import deep_architect.hyperparameters as hp import deep_architect.helpers.tfeager as htfe import deep_architect.modules as mo from search_spaces.tfe_ops import (relu, batch_normalization, conv2d, separable_conv2d, avg_pool2d, max_pool2d, min_pool2d, fc_layer, global_pool2d, dropout, add, flatten) from deep_architect.hyperparameters import Discrete as D global_vars = {} hp_sharer = hp.HyperparameterSharer() def cell(input_fn, node_fn, combine_fn, unused_combine_fn, num_nodes, hyperparameters): def substitution_fn(**dh): c_ins, c_outs = input_fn() nodes = [c_outs['Out0'], c_outs['Out1']] used_node = [False] * (num_nodes + 2) for i in range(num_nodes): # Get indices of hidden states to be combined idx0 = dh[str(i) + '_0'] idx1 = dh[str(i) + '_1'] # Transform hidden states h0 = node_fn(idx0, i, 0)