Example 1
# NOTE: get_layer, get_final_layer, ctx, set3, and ConstraintWarning are
# module-level helpers defined elsewhere in the source library.
from mxnet import init
from mxnet.gluon import nn

def create_final_layer(finetune_net, custom_network, num_classes, set=1):
    '''
    Create the final sub-network by attaching the custom layers to the base network.

    Args:
        finetune_net (network): Initial base network
        custom_network (list): List of dicts describing the layers appended to the base network for transfer learning
        num_classes (int): Number of classes in the dataset
        set (int): Model set (1, 2, or 3) that determines how the outermost layers are attached

    Returns:
        network: Updated base network with the custom layers appended
    '''
    # Name of the outermost (last) layer in the custom additions
    last_layer_name = custom_network[len(custom_network) - 1]["name"]

    # Set 1: append the custom layers to the existing .features block and make
    # the outermost layer the new .output head.
    if (set == 1):
        if (last_layer_name == "linear"):
            with finetune_net.name_scope():
                for i in range(len(custom_network) - 1):
                    layer = get_layer(custom_network[i])
                    finetune_net.features.add(layer)
                    finetune_net.features[len(finetune_net.features) -
                                          1].initialize(init.Xavier(), ctx=ctx)
                finetune_net.output = get_layer(
                    custom_network[len(custom_network) - 1])
                finetune_net.output.initialize(init.Xavier(), ctx=ctx)
        else:
            # The outermost entry names an activation that is fused into the final layer.
            with finetune_net.name_scope():
                for i in range(len(custom_network) - 2):
                    layer = get_layer(custom_network[i])
                    finetune_net.features.add(layer)
                    finetune_net.features[len(finetune_net.features) -
                                          1].initialize(init.Xavier(), ctx=ctx)
                finetune_net.output = get_final_layer(
                    custom_network[len(custom_network) - 2],
                    activation=custom_network[len(custom_network) - 1]['name'])
                finetune_net.output.initialize(init.Xavier(), ctx=ctx)

    # Set 2: wrap every custom layer in a fresh HybridSequential and use it as
    # the new .output head.
    if (set == 2):
        net = nn.HybridSequential()
        with net.name_scope():
            for i in range(len(custom_network)):
                layer = get_layer(custom_network[i])
                net.add(layer)
        with finetune_net.name_scope():
            finetune_net.output = net
            finetune_net.output.initialize(init.Xavier(), ctx=ctx)

    # Set 3: custom layer addition is not implemented; warn and ignore the added layers.
    if (set == 3):
        msg = "Custom model addition for - Set 3 models: Not Implemented.\n"
        msg += "Set 3 models - {}\n".format(set3)
        msg += "Ignoring added layers\n"
        ConstraintWarning(msg)

    return finetune_net
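
The snippet above references helpers (get_layer, get_final_layer, ctx, set3, ConstraintWarning) defined elsewhere in its source library, so it is not runnable on its own. Below is a minimal, hypothetical usage sketch: the get_layer stub, the layer-dict schema ("name", "units"), and the choice of a ResNet-18 base model are illustrative assumptions, not part of the original code.

# Hypothetical usage sketch for create_final_layer (set=1, "linear" outermost layer).
# The get_layer stub and the layer-dict schema below are assumptions for illustration.
import mxnet as mx
from mxnet import init
from mxnet.gluon import nn

ctx = mx.cpu()  # module-level context expected by create_final_layer

def get_layer(layer_dict):
    # Stand-in for the library's get_layer(): map a layer dict to a Gluon block.
    if layer_dict["name"] == "linear":
        return nn.Dense(layer_dict["units"])
    return nn.Activation(layer_dict["name"])

# Pretrained base model exposing .features and .output blocks.
finetune_net = mx.gluon.model_zoo.vision.resnet18_v1(pretrained=True, ctx=ctx)

# Custom head, outermost layer last; each dict carries at least a "name" key.
custom_network = [
    {"name": "linear", "units": 512},
    {"name": "relu"},
    {"name": "linear", "units": 10},   # final classifier for 10 classes
]

finetune_net = create_final_layer(finetune_net, custom_network, num_classes=10, set=1)
print(finetune_net.output)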