Code example #1
def get_n_hidden_units(layer):
    assert layer is not None
    if keras_layer.is_dense(layer):
        # get_weights() returns [kernel, biases]; the kernel holds the
        # weights between the previous layer and the current layer
        weights = layer.get_weights()
        kernel = weights[0]
        hidden_units_pre_layer = kernel.shape[0]
        hidden_units_curr_layer = kernel.shape[1]
        return hidden_units_pre_layer, hidden_units_curr_layer
    else:
        return None
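A minimal usage sketch; the keras_layer helper module is not shown in these snippets, so the is_dense stand-in below is an assumption:

import tensorflow as tf
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense

class keras_layer:  # hypothetical stand-in for the helper module used above
    @staticmethod
    def is_dense(layer):
        return isinstance(layer, Dense)

model = Sequential([tf.keras.Input(shape=(784,)), Dense(32), Dense(10)])
print(get_n_hidden_units(model.layers[0]))  # -> (784, 32)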
Code example #2
def get_dense_layers(model):
    """Collect every Dense layer of a Sequential model together with its index."""
    dense_layers = []
    indexes = []

    if isinstance(model, Sequential):
        for idx, layer in enumerate(model.layers):
            if keras_layer.is_dense(layer):
                dense_layers.append(layer)
                indexes.append(idx)

    # Note: zip(...) yields a single-use iterator of (layer, index) pairs.
    return zip(dense_layers, indexes)
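Usage sketch: the pairs come back in (layer, index) order, and since zip is a one-shot iterator, wrap the result in list(...) if it must be traversed more than once:

for layer, idx in get_dense_layers(model):
    print(idx, layer.name, get_n_hidden_units(layer))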
Code example #3
def generate_weights(model: Sequential):
    output = ""
    for idx, current_layer in enumerate(model.layers):
        print(f"\nlayer {current_layer.name}: {current_layer}")
        if keras_layer.is_dense(current_layer):
            # get_weights() returns [kernel, biases]
            weights = current_layer.get_weights()
            kernel = weights[0]
            biases = weights[1]

            print(f"#neurons in previous layer = {kernel.shape[0]}")
            print(f"#neurons in current layer = {kernel.shape[1]}")
            print(f"#biases = {len(biases)}")
            for i in range(kernel.shape[0]):
                for j in range(kernel.shape[1]):
                    output += f"double {weight(idx - 1, i, j)} = {kernel[i][j]};\n"

    return output
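The weight(...) naming helper is not defined in these snippets; a plausible sketch, with the naming scheme purely an assumption, consistent with the C declarations it is interpolated into:

def weight(layer_idx, i, j):
    # Hypothetical: one flat C identifier per connection weight.
    return f"weight_{layer_idx}_{i}_{j}"

Under that scheme a 2x2 kernel in the second layer would emit lines such as double weight_0_1_0 = 0.25;.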
Code example #4
def generate_weights(model: Sequential):
    output = ""
    for idx, current_layer in enumerate(model.layers):
        print(f"\nlayer {current_layer.name}: {current_layer}")
        # get_weights() returns [kernel, biases]
        weights = current_layer.get_weights()

        if keras_layer.is_dense(current_layer):
            kernel = weights[0]  # shape = (#units_pre_layer, #units_curr_layer)
            output += f"double[][] {current_layer.name}_kernel = new double[{kernel.shape[0]}][{kernel.shape[1]}];\n"
            for i in range(kernel.shape[0]):
                for j in range(kernel.shape[1]):
                    output += f"{current_layer.name}_kernel[{i}][{j}] = {kernel[i][j]}; "
            output += "\n"

            biases = weights[1]  # shape = (#units_curr_layer,)
            output += f"double[] {current_layer.name}_bias = new double[{biases.shape[0]}];\n"
            for i in range(biases.shape[0]):
                output += f"{current_layer.name}_bias[{i}] = {biases[i]}; "

        elif keras_layer.is_conv2d(current_layer):
            kernel = weights[0]  # shape = (filter_height, filter_width, #channels, #filters)
            output += f"double[][][][] {current_layer.name}_kernel = new double[{kernel.shape[0]}][{kernel.shape[1]}][{kernel.shape[2]}][{kernel.shape[3]}];\n"
            for i in range(kernel.shape[0]):
                for j in range(kernel.shape[1]):
                    for k in range(kernel.shape[2]):
                        for h in range(kernel.shape[3]):
                            output += f"{current_layer.name}_kernel[{i}][{j}][{k}][{h}] = {kernel[i][j][k][h]}; "
            output += "\n"

            biases = weights[1]  # shape = (#filters,)
            output += f"double[] {current_layer.name}_bias = new double[{biases.shape[0]}];\n"
            for i in range(biases.shape[0]):
                output += f"{current_layer.name}_bias[{i}] = {biases[i]}; "

    return output
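For a Dense layer named dense_1 with a 2x2 kernel, the emitted Java fragment looks like this (the weight values are illustrative):

double[][] dense_1_kernel = new double[2][2];
dense_1_kernel[0][0] = 0.1; dense_1_kernel[0][1] = -0.2; dense_1_kernel[1][0] = 0.3; dense_1_kernel[1][1] = 0.4;
double[] dense_1_bias = new double[2];
dense_1_bias[0] = 0.0; dense_1_bias[1] = 0.0;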
Code example #5
def create_constraint_of_propogation_layers(model_object, modified_features, x):
    assert isinstance(model_object, abstract_dataset)
    smt_constraints = []

    model = model_object.get_model()
    if not keras_model.is_ANN(model):
        return smt_constraints  # no constraints for non-ANN models

    for current_layer_idx, current_layer in enumerate(model.layers):
        pre_layer_idx = current_layer_idx - 1
        if keras_layer.is_dense(current_layer):
            smt_constraints.append(f'\n; Dense layer {current_layer.name}')

            weights = current_layer.get_weights()  # returns [kernel, biases]
            kernel = weights[0]
            biases = weights[1]
            n_features = kernel.shape[0]
            hidden_units_curr_layer = kernel.shape[1]

            for current_pos in range(hidden_units_curr_layer):
                var = f'l{current_layer_idx}_{current_pos}'
                smt_constraint = ''
                for feature_idx in range(n_features):
                    if current_layer_idx == 0:  # the first layer after the input layer
                        if feature_idx in modified_features:
                            previous_var = f'(/ feature_{feature_idx} 255)'  # symbolic value
                        else:
                            previous_var = f'{x[feature_idx]}'  # real value
                    else:
                        previous_var = f'l{pre_layer_idx}_{feature_idx}'
                    weight = kernel[feature_idx][current_pos]

                    # weight = weight / NORMALIZATION_FACTOR  # rather than normalizing feature input
                    if feature_idx == 0:
                        smt_constraint = f'(* {previous_var} {weight:.25f}) '
                    else:
                        smt_constraint = f'(+ {smt_constraint} (* {previous_var} {weight:.25f})) '

                smt_constraint = f'(+ {smt_constraint} {biases[current_pos]:.25f}) '
                smt_constraint = f'(assert(= {var} {smt_constraint}))'
                smt_constraints.append(smt_constraint)

        elif keras_activation.is_relu(current_layer):
            units = keras_layer.get_number_of_units(model, current_layer_idx)
            smt_constraints.append('\n; ReLU layer')
            prelayer_idx = current_layer_idx - 1
            for unit_idx in range(units):
                # ReLU encoded as an SMT ite term: v = (u >= 0 ? u : 0)
                smt_constraint = f'(= l{current_layer_idx}_{unit_idx} (ite (>= l{prelayer_idx}_{unit_idx} 0) l{prelayer_idx}_{unit_idx} 0))'
                smt_constraints.append(f'(assert{smt_constraint})')

    return smt_constraints
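For a first Dense layer over two input features (one symbolic, one concrete) followed by a ReLU, the generated constraints take roughly this SMT-LIB shape; the coefficients are illustrative and shortened, whereas the generator prints them with 25 decimal places:

; Dense layer dense_1
(assert(= l0_0 (+ (+ (* (/ feature_0 255) 0.1) (* 0.5 -0.2)) 0.05)))
; ReLU layer
(assert(= l1_0 (ite (>= l0_0 0) l0_0 0)))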
Code example #6
def is_dense_and_activity(layer):
    assert layer is not None
    # True only for a Dense layer that also acts as an activation layer;
    # returning the expression directly yields False (not None) otherwise.
    return keras_layer.is_dense(layer) and keras_activation.is_activation(layer)
Code example #7
        lines = f.readlines()
        for line in lines:
            code += line
    # print(generate_weights(model))
    '''
    generate predict()
    '''
    code += generate_prototype(inputlayer) + "\n"
    code += "{\n"
    code += normalize_features(inputlayer) + "\n"

    for idx, current_layer in enumerate(model.layers):
        if keras_layer.is_inputlayer(current_layer):
            continue
        elif keras_layer.is_dense(current_layer):
            code += handle_dense_layer(model.layers[idx], idx, model)
            code += "\n"
        elif keras_layer.is_activation(current_layer):
            if keras_activation.is_relu(current_layer):
                code += handle_relu_layer(model.layers[idx], idx, model)
                code += "\n"
            elif keras_activation.is_softmax(current_layer):
                code += handle_softmax_layer(model.layers[idx], idx, model)
                code += "\n"

    code += print_all_vars()
    code += "}\n\n"

    '''
    initialize testpath