Example #1
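# Auxiliary classifier head: ReLU, 5x5 average pooling (stride 3, VALID
# padding), a 1x1 conv with 128 filters, a global convolution with 768
# filters, and a 10-way fully connected layer, with batch normalization and
# ReLU in between.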
def aux_logits():
    return mo.siso_sequential([
        relu(),
        avg_pool2d(D([5]), D([3]), D(['VALID'])),
        conv2d(D([128]), D([1])),
        batch_normalization(),
        relu(),
        global_convolution(D([768])),
        batch_normalization(),
        relu(),
        flatten(),
        fc_layer(D([10]))
    ])
Example #2
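# Intermediate cell node: adds its num_inputs incoming tensors, then applies
# a 3x3 conv with the given number of filters, batch normalization, and ReLU.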
def intermediate_node_fn(num_inputs, filters):
    return mo.siso_sequential([
        add(num_inputs),
        conv2d(D([filters]), D([3])),
        batch_normalization(),
        relu()
    ])
Example #3
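# Convolutional op: either a spatially separable 1 x k followed by k x 1 conv
# pair (with batch normalization and ReLU in between), or a single k x k
# conv; the dilation rate is only used in the non-separable case.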
def conv_op(filters, filter_size, stride, dilation_rate, spatial_separable):
    if spatial_separable:
        return mo.siso_sequential([
            conv2d(D([filters]), D([[1, filter_size]]), D([[1, stride]])),
            batch_normalization(),
            relu(),
            conv2d(D([filters]), D([[filter_size, 1]]), D([[stride, 1]])),
        ])
    else:
        return conv2d(D([filters]), D([filter_size]), D([stride]),
                      D([dilation_rate]))
Example #4
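# Assembles the full search space: a stem, alternating normal and reduction
# cells (reduction cells are spread evenly and double the filter count), an
# auxiliary classifier head partway through, and a final ReLU / global pool /
# dropout / fully connected classifier.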
def generate_search_space(num_nodes_per_cell, num_normal_cells,
                          num_reduction_cells, init_filters, stem_multiplier):
    global global_vars, hp_sharer
    global_vars = {}
    hp_sharer = hp.HyperparameterSharer()
    hp_sharer.register('drop_path_keep_prob',
                       lambda: D([.7], name='drop_path_keep_prob'))
    stem_in, stem_out = stem(int(init_filters * stem_multiplier))
    total_steps_in, total_steps_out = mo.identity()
    global_vars['total_steps'] = total_steps_out['out']
    normal_cell_fn = create_cell_generator(num_nodes_per_cell, False)
    reduction_cell_fn = create_cell_generator(num_nodes_per_cell, True)

    total_cells = num_normal_cells + num_reduction_cells
    has_reduction = [False] * num_normal_cells
    for i in range(num_reduction_cells):
        has_reduction[int(
            float(i + 1) / (num_reduction_cells + 1) *
            num_normal_cells)] = True

    inputs = [stem_out, stem_out]
    filters = init_filters
    aux_loss_idx = int(
        float(num_reduction_cells) /
        (num_reduction_cells + 1) * num_normal_cells) - 1

    outs = {}
    cells_created = 0.0
    for i in range(num_normal_cells):
        if has_reduction[i]:
            filters *= 2
            connect_new_cell(
                reduction_cell_fn(filters, (cells_created + 1) / total_cells),
                inputs)
            cells_created += 1.0
        connect_new_cell(
            normal_cell_fn(filters, (cells_created + 1) / total_cells), inputs)
        cells_created += 1.0
        if i == aux_loss_idx:
            aux_in, aux_out = aux_logits()
            aux_in['in'].connect(inputs[-1]['out'])
            outs['out0'] = aux_out['out']
    _, final_out = mo.siso_sequential([(None, inputs[-1]),
                                       relu(),
                                       global_pool2d(),
                                       dropout(D([1.0])),
                                       fc_layer(D([10]))])
    outs['out1'] = final_out['out']
    return {'in0': stem_in['in'], 'in1': total_steps_in['in']}, outs
Example #5
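# One stage of the genetic encoding: a Bool hyperparameter for every possible
# connection between the stage's nodes, a conv / batch norm / ReLU block at
# the stage input, and intermediate_node_fn for the internal nodes.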
def generate_stage(stage_num, num_nodes, filters, filter_size):
    h_connections = [
        Bool(name='%d_in_%d_%d' % (stage_num, in_id, out_id))
        for (in_id,
             out_id) in itertools.combinations(range(1, num_nodes + 1), 2)
    ]

    return genetic_stage(
        lambda: mo.siso_sequential([
            conv2d(D([filters]), D([filter_size])),
            batch_normalization(),
            relu()
        ]),
        lambda num_inputs: intermediate_node_fn(num_inputs, filters),
        lambda num_inputs: intermediate_node_fn(num_inputs, filters),
        h_connections, num_nodes)
Example #6
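# Forward function of a shape-matching module: identity when height and
# channel count already match, a conv + batch norm when only the channels
# differ, and otherwise a two-path average-pooled reduction whose outputs are
# concatenated and batch normalized. The names it uses (add_relu, height,
# channels, conv, bn, avg_pool, pad, slice_layer, concat, ...) are captured
# from the enclosing scope.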
def forward_fn(di, is_training=True):
    inp = relu(di['in0']) if add_relu else di['in0']
    if height == final_height and channels == final_channels:
        out = inp
    elif height == final_height:
        out = conv(inp)
        out = bn(out)
    else:
        path1 = avg_pool(inp)
        path1 = conv1(path1)
        path2 = slice_layer(pad(inp))
        path2 = avg_pool(path2)
        path2 = conv2(path2)
        out = concat([path1, path2])
        out = bn(out)
    return {'out': out}
Example #7
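# Pre-activation wrapper: applies ReLU before the given op and batch
# normalization after it.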
def wrap_relu_batch_norm(op):
    return mo.siso_sequential([relu(), op, batch_normalization()])
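A minimal usage sketch (hypothetical, not from the original source), assuming
the conv2d and D helpers from the earlier examples are in scope:

# Wrap a 3x3 conv with 64 filters so it runs as ReLU -> conv -> batch norm.
op_in, op_out = wrap_relu_batch_norm(conv2d(D([64]), D([3])))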