Example 1
def get_conv_dense_model():
    # Two ReLU -> Conv -> BatchNorm blocks on a 32x32x3 input, followed by
    # two dense layers and a softmax. The second Graph argument is weighted=False.
    graph = Graph((32, 32, 3), False)
    output_node_id = 0

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubConv(3, 3, 3), output_node_id)  # (input_channel, filters, kernel_size)
    output_node_id = graph.add_layer(StubBatchNormalization(3), output_node_id)

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubConv(3, 3, 3), output_node_id)
    output_node_id = graph.add_layer(StubBatchNormalization(3), output_node_id)

    output_node_id = graph.add_layer(StubFlatten(), output_node_id)
    output_node_id = graph.add_layer(StubDropout(Constant.DENSE_DROPOUT_RATE), output_node_id)

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubDense(graph.node_list[output_node_id].shape[0], 5),
                                     output_node_id)

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubDense(5, 5), output_node_id)
    graph.add_layer(StubSoftmax(), output_node_id)

    graph.produce_model().set_weight_to_graph()  # materialize a model, then copy its weights back onto the graph

    return graph
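
A minimal usage sketch for the function above. The commented import paths are assumptions (these Graph/Stub* classes match AutoKeras's legacy internal API, whose module layout varied across releases), and layer_list is an assumed Graph attribute:

# Hypothetical imports; adjust to wherever Graph and the Stub* layers live.
# from autokeras.nn.graph import Graph
# from autokeras.nn.layers import StubReLU, StubConv, StubBatchNormalization

graph = get_conv_dense_model()
print(len(graph.layer_list))   # number of stub layers in the graph
model = graph.produce_model()  # materialize a trainable model from the stubs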
Example 2
    def generate(self,
                 model_len=Constant.MODEL_LEN,
                 model_width=Constant.MODEL_WIDTH):
        # Plain CNN: model_len ReLU -> Conv -> BatchNorm blocks with periodic
        # pooling, then dropout and two dense layers ending in a softmax.
        pooling_len = int(model_len / 4)
        graph = Graph(self.input_shape, False)
        temp_input_channel = self.input_shape[-1]
        output_node_id = 0
        for i in range(model_len):
            output_node_id = graph.add_layer(StubReLU(), output_node_id)
            output_node_id = graph.add_layer(
                StubConv(temp_input_channel, model_width, kernel_size=3),
                output_node_id)
            output_node_id = graph.add_layer(
                StubBatchNormalization(model_width), output_node_id)
            temp_input_channel = model_width
            if pooling_len == 0 or ((i + 1) % pooling_len == 0
                                    and i != model_len - 1):
                # Pool every pooling_len blocks, skipping the final block;
                # when model_len < 4, pooling_len is 0 and every block pools.
                output_node_id = graph.add_layer(StubPooling(), output_node_id)

        output_node_id = graph.add_layer(StubFlatten(), output_node_id)
        output_node_id = graph.add_layer(
            StubDropout(Constant.CONV_DROPOUT_RATE), output_node_id)
        output_node_id = graph.add_layer(
            StubDense(graph.node_list[output_node_id].shape[0], model_width),
            output_node_id)
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        output_node_id = graph.add_layer(
            StubDense(model_width, self.n_classes), output_node_id)
        graph.add_layer(StubSoftmax(), output_node_id)
        return graph
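
Assuming this method lives on a generator class whose constructor stores n_classes and input_shape (the CnnGenerator name and signature below are illustrative, not confirmed by the source), usage would look like:

generator = CnnGenerator(n_classes=10, input_shape=(32, 32, 3))
graph = generator.generate()                # default depth and width
deep = generator.generate(model_len=12,     # more conv blocks
                          model_width=128)  # wider conv layers
model = graph.produce_model()               # as in the other examples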
Example 3
def to_stub_model(model, weighted=False):
    # Convert a Keras model into a StubModel: map each Keras tensor to a
    # StubTensor, then translate every layer into its Stub* counterpart.
    node_count = 0
    tensor_dict = {}
    ret = StubModel()
    ret.input_shape = model.input_shape
    for layer in model.layers:
        if isinstance(layer.input, list):
            input_nodes = layer.input
        else:
            input_nodes = [layer.input]

        # Register a StubTensor the first time each Keras tensor appears.
        for node in input_nodes + [layer.output]:
            if node not in tensor_dict:
                tensor_dict[node] = StubTensor(get_int_tuple(node.shape))
                node_count += 1

        # Despite the "_id" names, input_id and output_id hold the StubTensor
        # objects themselves, not integer indices.
        if isinstance(layer.input, list):
            input_id = [tensor_dict[node] for node in layer.input]
        else:
            input_id = tensor_dict[layer.input]
        output_id = tensor_dict[layer.output]

        # Translate the Keras layer into the matching Stub* layer.
        if is_conv_layer(layer):
            temp_stub_layer = StubConv(layer.filters, layer.kernel_size,
                                       layer.__class__, input_id, output_id)
        elif isinstance(layer, Dense):
            temp_stub_layer = StubDense(layer.units, layer.activation,
                                        input_id, output_id)
        elif isinstance(layer, WeightedAdd):
            temp_stub_layer = StubWeightedAdd(input_id, output_id)
        elif isinstance(layer, Concatenate):
            temp_stub_layer = StubConcatenate(input_id, output_id)
        elif isinstance(layer, BatchNormalization):
            temp_stub_layer = StubBatchNormalization(input_id, output_id)
        elif isinstance(layer, Activation):
            temp_stub_layer = StubActivation(layer.activation, input_id,
                                             output_id)
        elif isinstance(layer, InputLayer):
            temp_stub_layer = StubLayer(input_id, output_id)
        elif isinstance(layer, Flatten):
            temp_stub_layer = StubFlatten(input_id, output_id)
        elif isinstance(layer, Dropout):
            temp_stub_layer = StubDropout(layer.rate, input_id, output_id)
        elif is_pooling_layer(layer):
            temp_stub_layer = StubPooling(layer.__class__, input_id, output_id)
        elif is_global_pooling_layer(layer):
            temp_stub_layer = StubGlobalPooling(layer.__class__, input_id,
                                                output_id)
        else:
            raise TypeError("Unsupported layer: {}.".format(layer))
        # Optionally copy the trained Keras weights onto the stub layer.
        if weighted:
            temp_stub_layer.set_weights(layer.get_weights())
        ret.add_layer(temp_stub_layer)
    ret.inputs = [tensor_dict[model.inputs[0]]]
    ret.outputs = [tensor_dict[model.outputs[0]]]
    return ret
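
A hedged end-to-end sketch: build a tiny Keras model and convert it. The Keras imports are standard; to_stub_model and its Stub* helpers are assumed to come from the module above:

from keras.layers import Conv2D, Dense, Flatten, InputLayer
from keras.models import Sequential

keras_model = Sequential([
    InputLayer(input_shape=(8, 8, 3)),
    Conv2D(4, kernel_size=3),  # handled by the is_conv_layer branch
    Flatten(),                 # -> StubFlatten
    Dense(10),                 # -> StubDense
])

stub = to_stub_model(keras_model)                          # topology only
stub_weighted = to_stub_model(keras_model, weighted=True)  # also copies weights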
Example 4
def get_add_skip_model():
    # Like get_conv_dense_model, but on a 5x5x3 input and with two additive
    # skip connections: a 1x1 conv on each shortcut, merged via StubAdd.
    graph = Graph((5, 5, 3), False)
    output_node_id = 0

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubConv(3, 3, 3), output_node_id)
    output_node_id = graph.add_layer(StubBatchNormalization(3), output_node_id)
    output_node_id = graph.add_layer(StubDropout(Constant.CONV_DROPOUT_RATE), output_node_id)

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubConv(3, 3, 3), output_node_id)
    output_node_id = graph.add_layer(StubBatchNormalization(3), output_node_id)
    output_node_id = graph.add_layer(StubDropout(Constant.CONV_DROPOUT_RATE), output_node_id)

    temp_node_id = output_node_id  # remember the block input for the first skip connection

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubConv(3, 3, 3), output_node_id)
    output_node_id = graph.add_layer(StubBatchNormalization(3), output_node_id)
    output_node_id = graph.add_layer(StubDropout(Constant.CONV_DROPOUT_RATE), output_node_id)

    temp_node_id = graph.add_layer(StubConv(3, 3, 1), temp_node_id)  # 1x1 conv on the shortcut
    output_node_id = graph.add_layer(StubAdd(), [output_node_id, temp_node_id])  # merge with the main path

    temp_node_id = output_node_id  # remember the block input for the second skip connection

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubConv(3, 3, 3), output_node_id)
    output_node_id = graph.add_layer(StubBatchNormalization(3), output_node_id)
    output_node_id = graph.add_layer(StubDropout(Constant.CONV_DROPOUT_RATE), output_node_id)

    temp_node_id = graph.add_layer(StubConv(3, 3, 1), temp_node_id)  # 1x1 conv on the shortcut
    output_node_id = graph.add_layer(StubAdd(), [output_node_id, temp_node_id])  # merge with the main path

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubConv(3, 3, 3), output_node_id)
    output_node_id = graph.add_layer(StubBatchNormalization(3), output_node_id)
    output_node_id = graph.add_layer(StubDropout(Constant.CONV_DROPOUT_RATE), output_node_id)

    output_node_id = graph.add_layer(StubFlatten(), output_node_id)

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubDense(graph.node_list[output_node_id].shape[0], 5),
                                     output_node_id)
    output_node_id = graph.add_layer(StubDropout(Constant.DENSE_DROPOUT_RATE), output_node_id)

    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(StubDense(5, 5), output_node_id)
    output_node_id = graph.add_layer(StubDropout(Constant.DENSE_DROPOUT_RATE), output_node_id)
    graph.add_layer(StubSoftmax(), output_node_id)

    graph.produce_model().set_weight_to_graph()

    return graph
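
The three conv blocks and two shortcuts above repeat a single pattern; a hypothetical refactoring into helpers (the names are mine, not from the source) makes that structure explicit:

def add_conv_block(graph, node_id, channels=3):
    # ReLU -> Conv -> BatchNorm -> Dropout, exactly as repeated above.
    node_id = graph.add_layer(StubReLU(), node_id)
    node_id = graph.add_layer(StubConv(channels, channels, 3), node_id)
    node_id = graph.add_layer(StubBatchNormalization(channels), node_id)
    return graph.add_layer(StubDropout(Constant.CONV_DROPOUT_RATE), node_id)

def add_skip(graph, block_input_id, block_output_id, channels=3):
    # 1x1 conv on the shortcut, then an element-wise add with the block output.
    skip_id = graph.add_layer(StubConv(channels, channels, 1), block_input_id)
    return graph.add_layer(StubAdd(), [block_output_id, skip_id])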