# Assumed imports for these snippets (Hypernets-style search-space API;
# the exact module paths vary by version, so treat these as assumptions):
#   from hypernets.core.search_space import HyperSpace, Bool, Choice
#   from hypernets.core.ops import InputChoice, ModuleChoice, Repeat
# The layer wrappers (Input, Dense, Flatten) and candidate-op factories
# (sepconv5x5, conv_layer, conv_node, conv_cell, ...) come from the
# surrounding package.


def get_space(cache):
    space = HyperSpace()
    with space.as_default():
        name_prefix = 'test_'
        filters = 64
        in1 = Input(shape=(28, 28, 1))
        in2 = Input(shape=(28, 28, 1))
        # let the searcher pick exactly one of the two inputs
        ic1 = InputChoice([in1, in2], 1)([in1, in2])
        # let the searcher pick one of five candidate operations
        or1 = ModuleChoice([sepconv5x5(name_prefix, filters),
                            sepconv3x3(name_prefix, filters),
                            avgpooling3x3(name_prefix, filters),
                            maxpooling3x3(name_prefix, filters),
                            identity(name_prefix)])(ic1)
        space.set_inputs([in1, in2])
        space.weights_cache = cache
    return space
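# Usage sketch for the space above. random_sample(), all_assigned and
# vectors are assumptions based on Hypernets' random searcher, not
# definitions from this file.
def sample_choice_space():
    space = get_space(cache=None)
    space.random_sample()       # randomly assign the input and module choices
    assert space.all_assigned
    return space.vectors        # the assigned hyperparameter vector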
import itertools


def cnn_search_space(input_shape,
                     output_units,
                     output_activation='softmax',
                     block_num_choices=[2, 3, 4, 5, 6],
                     activation_choices=['relu'],
                     filters_choices=[32, 64],
                     kernel_size_choices=[(1, 1), (3, 3)]):
    space = HyperSpace()
    with space.as_default():
        hp_use_bn = Bool()
        hp_pooling = Choice(list(range(2)))  # 0 or 1: which pooling variant
        hp_filters = Choice(filters_choices)
        hp_kernel_size = Choice(kernel_size_choices)
        hp_fc_units = Choice([1024, 2048, 4096])
        if len(activation_choices) == 1:
            hp_activation = activation_choices[0]
        else:
            hp_activation = Choice(activation_choices)
        # the order in which batch-norm and activation are applied
        hp_bn_act = Choice([seq for seq in itertools.permutations(range(2))])
        input = Input(shape=input_shape)
        # stack a searchable number of conv blocks; every block receives the
        # same hyperparameter objects, so one sample configures all blocks
        blocks = Repeat(
            lambda step: conv_block(
                block_no=step,
                hp_pooling=hp_pooling,
                hp_filters=hp_filters,
                hp_kernel_size=hp_kernel_size,
                hp_use_bn=hp_use_bn,
                hp_activation=hp_activation,
                hp_bn_act=hp_bn_act),
            repeat_times=block_num_choices)(input)
        x = Flatten()(blocks)
        x = Dense(units=hp_fc_units, activation=hp_activation, name='fc1')(x)
        x = Dense(units=hp_fc_units, activation=hp_activation, name='fc2')(x)
        x = Dense(output_units, activation=output_activation,
                  name='predictions')(x)
    return space
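# Usage sketch for cnn_search_space (random_sample() is again an assumed
# API): build a space for MNIST-sized inputs and draw one configuration.
def sample_cnn_space():
    space = cnn_search_space(input_shape=(28, 28, 1), output_units=10,
                             block_num_choices=[2, 3],
                             activation_choices=['relu', 'tanh'])
    space.random_sample()
    return space.vectors  # one concrete assignment of every Bool/Choice above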
def get_space():
    space = HyperSpace()
    with space.as_default():
        filters = 64
        in1 = Input(shape=(28, 28, 1))
        conv_layer(hp_dict, 'normal', 0, [in1, in1], filters, 5)
        space.set_inputs(in1)
    return space
def get_space():
    space = HyperSpace()
    with space.as_default():
        filters = 64
        in1 = Input(shape=(28, 28, 1), dtype='float32')
        conv_node(hp_dict, 'normal', 0, 0, [in1, in1], filters)
        space.set_inputs(in1)
    return space
def enas_micro_search_space(arch='NRNR',
                            input_shape=(28, 28, 1),
                            init_filters=64,
                            node_num=4,
                            data_format=None,
                            classes=10,
                            classification_dropout=0,
                            hp_dict=None,
                            use_input_placeholder=True,
                            weights_cache=None):
    # use None instead of a mutable {} default: a shared default dict would
    # leak hyperparameters between independently built spaces
    if hp_dict is None:
        hp_dict = {}
    space = HyperSpace()
    with space.as_default():
        if use_input_placeholder:
            input = Input(shape=input_shape, name='0_input')
        else:
            input = None
        stem, input = stem_op(input, init_filters, data_format)
        node0 = stem
        node1 = stem
        reduction_no = 0
        normal_no = 0
        for l in arch:
            if l == 'N':
                normal_no += 1
                type = 'normal'
                cell_no = normal_no
                is_reduction = False
            else:
                reduction_no += 1
                type = 'reduction'
                cell_no = reduction_no
                is_reduction = True
            # double the filters at every reduction cell
            filters = (2 ** reduction_no) * init_filters
            if is_reduction:
                node0 = FactorizedReduction(
                    filters, f'{normal_no + reduction_no}_{type}_C{cell_no}_0',
                    data_format)(node0)
                node1 = FactorizedReduction(
                    filters, f'{normal_no + reduction_no}_{type}_C{cell_no}_1',
                    data_format)(node1)
            x = conv_layer(hp_dict, f'{normal_no + reduction_no}_{type}',
                           cell_no, [node0, node1], filters, node_num,
                           is_reduction)
            # the two cell inputs slide forward one position per cell
            node0 = node1
            node1 = x
        logit = classification(x, classes, classification_dropout, data_format)
        space.set_inputs(input)
        if weights_cache is not None:
            space.weights_cache = weights_cache
    return space
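# Usage sketch: 'NR' builds one normal cell followed by one reduction cell.
# hp_dict appears to act as a cache keyed by name, so cells of the same type
# reuse hyperparameters, and weights_cache enables ENAS-style weight sharing
# across sampled architectures; random_sample() is again an assumed API.
def sample_enas_space():
    shared_hp = {}
    space = enas_micro_search_space(arch='NR', input_shape=(28, 28, 1),
                                    init_filters=64, node_num=4, classes=10,
                                    hp_dict=shared_hp)
    space.random_sample()
    return space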
def get_space():
    space = HyperSpace()
    with space.as_default():
        filters = 64
        in1 = Input(shape=(28, 28, 1))
        # 'L' presumably selects the left branch of node 0 ('R' the right)
        conv = conv_cell(hp_dict, 'normal', 0, 0, 'L', [in1, in1], filters)
        space.set_inputs([in1, in1])
        space.set_outputs(conv)
    return space
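# The three small fixtures above appear to exercise successive levels of one
# hierarchy (inferred from the call signatures, not stated here): conv_cell
# builds a single candidate op on one branch of a node, conv_node combines a
# node's branches, and conv_layer stacks node_num nodes into a full cell.
# Sampling works the same at every level:
def sample_cell_space():
    space = get_space()
    space.random_sample()      # assumed API, as in the sketches above
    assert space.all_assigned  # assumed HyperSpace property
    return space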