# Auxiliary classifier head, attached partway through the network.
def aux_logits():
    return mo.siso_sequential([
        relu(),
        avg_pool2d(D([5]), D([3]), D(['VALID'])),
        conv2d(D([128]), D([1])),
        batch_normalization(),
        relu(),
        global_convolution(D([768])),
        batch_normalization(),
        relu(),
        flatten(),
        fc_layer(D([10]))
    ])
# Fixed intermediate node: sum the inputs, then conv3x3 -> BN -> ReLU.
# (A searchable variant with a per-node op choice appears further below.)
def intermediate_node_fn(num_inputs, filters):
    return mo.siso_sequential([
        add(num_inputs),
        conv2d(D([filters]), D([3])),
        batch_normalization(),
        relu()
    ])
# cell, cell_ops, h_connections, and concat are defined in the enclosing scope.
def generate(filters):
    return cell(
        lambda channels: mo.siso_sequential([
            conv2d(D([channels]), D([1])),
            batch_normalization(),
            relu()
        ]),
        lambda num_inputs, node_id, channels: intermediate_node_fn(
            num_inputs, node_id, channels, cell_ops),
        concat, h_connections, 5, filters)
def conv_op(filters, filter_size, stride, dilation_rate, spatial_separable):
    if spatial_separable:
        # Factorize the k x k convolution into a 1 x k followed by a k x 1.
        return mo.siso_sequential([
            conv2d(D([filters]), D([[1, filter_size]]), D([[1, stride]])),
            batch_normalization(),
            relu(),
            conv2d(D([filters]), D([[filter_size, 1]]), D([[stride, 1]])),
        ])
    else:
        return conv2d(D([filters]), D([filter_size]), D([stride]),
                      D([dilation_rate]))
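# Hedged sketch (not in the original listings): the conv_op variants could be
# exposed as a single searchable choice via mo.siso_or, mirroring the pattern
# used in intermediate_node_fn below. The op names and the particular variants
# listed here are illustrative assumptions.
def conv_op_choice(filters, stride):
    return mo.siso_or(
        {
            'conv3': lambda: conv_op(filters, 3, stride, 1, False),
            'sep3': lambda: conv_op(filters, 3, stride, 1, True),
            'dil3': lambda: conv_op(filters, 3, stride, 2, False),
        }, D(['conv3', 'sep3', 'dil3']))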
# Searchable intermediate node: the op applied after the input sum is chosen
# per node through the cell_ops hyperparameters.
def intermediate_node_fn(num_inputs, node_id, filters, cell_ops):
    return mo.siso_sequential([
        add(num_inputs),
        mo.siso_or(
            {
                'conv1': lambda: conv2d(D([filters]), D([1])),
                'conv3': lambda: conv2d(D([filters]), D([3])),
                'max3': lambda: max_pool2d(D([3]))
            }, cell_ops[node_id]),
        batch_normalization(),
        relu()
    ])
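# Hedged sketch (an assumption, not from the original listing): cell_ops is
# expected to hold one discrete hyperparameter per node, drawn from the op
# names accepted above. One plausible way to construct it:
def make_cell_ops(num_nodes):
    return [D(['conv1', 'conv3', 'max3']) for _ in range(num_nodes)]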
def generate_search_space(num_nodes_per_cell, num_normal_cells,
                          num_reduction_cells, init_filters, stem_multiplier):
    global global_vars, hp_sharer
    global_vars = {}
    hp_sharer = hp.HyperparameterSharer()
    hp_sharer.register('drop_path_keep_prob',
                       lambda: D([.7], name='drop_path_keep_prob'))
    stem_in, stem_out = stem(int(init_filters * stem_multiplier))
    progress_in, progress_out = mo.identity()
    global_vars['progress'] = progress_out['Out']

    normal_cell_fn = create_cell_generator(num_nodes_per_cell, False)
    reduction_cell_fn = create_cell_generator(num_nodes_per_cell, True)
    total_cells = num_normal_cells + num_reduction_cells

    # Spread the reduction cells evenly among the normal cells.
    has_reduction = [False] * num_normal_cells
    for i in range(num_reduction_cells):
        has_reduction[int(
            float(i + 1) / (num_reduction_cells + 1) * num_normal_cells)] = True

    # Each cell sees the outputs of the two previous cells.
    inputs = [stem_out, stem_out]
    filters = init_filters
    aux_loss_idx = int(
        float(num_reduction_cells) / (num_reduction_cells + 1) *
        num_normal_cells) - 1

    outs = {}
    cells_created = 0.0
    for i in range(num_normal_cells):
        if has_reduction[i]:
            # Reduction cells double the filter count.
            filters *= 2
            connect_new_cell(
                reduction_cell_fn(filters, (cells_created + 1) / total_cells),
                inputs)
            cells_created += 1.0
        connect_new_cell(
            normal_cell_fn(filters, (cells_created + 1) / total_cells), inputs)
        cells_created += 1.0
        if i == aux_loss_idx:
            aux_in, aux_out = aux_logits()
            aux_in['In'].connect(inputs[-1]['Out'])
            outs['Out0'] = aux_out['Out']

    _, final_out = mo.siso_sequential([(None, inputs[-1]),
                                       relu(),
                                       global_pool2d(),
                                       dropout(D([1.0])),
                                       fc_layer(D([10]))])
    outs['Out1'] = final_out['Out']
    return {'In0': stem_in['In'], 'In1': progress_in['In']}, outs
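# Hedged usage sketch (assumed; the searcher/evaluator wiring depends on the
# surrounding framework code): instantiating the search space with a
# NASNet-like configuration. The argument values are illustrative only.
inputs, outputs = generate_search_space(
    num_nodes_per_cell=5,
    num_normal_cells=6,
    num_reduction_cells=2,
    init_filters=32,
    stem_multiplier=3)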
def generate_stage(stage_num, num_nodes, filters, filter_size):
    # One Boolean hyperparameter per potential connection between nodes,
    # i.e. num_nodes choose 2 of them per stage.
    h_connections = [
        Bool(name='%d_in_%d_%d' % (stage_num, in_id, out_id))
        for (in_id, out_id) in itertools.combinations(
            range(1, num_nodes + 1), 2)
    ]
    return genetic_stage(
        lambda: mo.siso_sequential([
            conv2d(D([filters]), D([filter_size])),
            batch_normalization(),
            relu()
        ]),
        lambda num_inputs: intermediate_node_fn(num_inputs, filters),
        lambda num_inputs: intermediate_node_fn(num_inputs, filters),
        h_connections, num_nodes)
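# Hedged sketch (assumed, not from the original): stages could be chained into
# a full backbone with mo.siso_sequential; the stage sizes and the pooling
# between stages are illustrative choices only.
def generate_backbone():
    return mo.siso_sequential([
        generate_stage(1, 4, 64, 3),
        max_pool2d(D([2])),
        generate_stage(2, 5, 128, 3),
        max_pool2d(D([2])),
        generate_stage(3, 4, 256, 3),
    ])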
def forward_fn(di, isTraining=True):
    inp = relu(di['In0']) if add_relu else di['In0']
    if height == final_height and channels == final_channels:
        # Shapes already match: pass the input through unchanged.
        out = inp
    elif height == final_height:
        # Same spatial size, different channel count: conv + BN.
        out = conv(inp)
        out = bn(out)
    else:
        # Factorized reduction: two strided average-pool/conv paths, the
        # second shifted by one pixel via pad + slice, concatenated and
        # batch-normalized.
        path1 = avg_pool(inp)
        path1 = conv1(path1)
        path2 = slice_layer(pad(inp))
        path2 = avg_pool(path2)
        path2 = conv2(path2)
        out = concat([path1, path2])
        out = bn(out)
    return {'Out': out}
def wrap_relu_batch_norm(op):
    return mo.siso_sequential([relu(), op, batch_normalization()])
def stem():
    return mo.siso_sequential([
        conv2d(D([128]), D([3])),
        batch_normalization(),
        relu(),
    ])