Esempio n. 1
0
def adjust_layer_after_split_pos_neg(network: Network,
                                     layer_index: int = FIRST_POS_NEG_LAYER
                                     ) -> None:
    """
    Rewire the edges between layer `layer_index` and its successor after the
    successor's nodes were split into pos/neg copies.

    For every node in the next layer, an incoming edge is created from each
    current-layer node whose out-edge destination matches the next node's
    name, either exactly or with a "_pos"/"_neg" suffix appended.

    :param network: Network whose layers were already split pos/neg
    :param layer_index: index of the layer whose out-edges are rebuilt
    """
    layer = network.layers[layer_index]
    successor = network.layers[layer_index + 1]
    for node in layer.nodes:
        node.new_out_edges = []
    for target in successor.nodes:
        target.new_in_edges = []
        for source in layer.nodes:
            for old_edge in source.out_edges:
                for tag in ("", "_pos", "_neg"):
                    if old_edge.dest + tag == target.name:
                        new_edge = Edge(source.name, target.name,
                                        old_edge.weight)
                        source.new_out_edges.append(new_edge)
                        target.new_in_edges.append(new_edge)
        target.in_edges = target.new_in_edges
        del target.new_in_edges
    for node in layer.nodes:
        node.out_edges = node.new_out_edges
        del node.new_out_edges
    if VERBOSE:
        debug_print("after adjust_layer_after_split_pos_neg()")
        print(network)
Esempio n. 2
0
def abstract_network(network: Network,
                     do_preprocess: bool = True,
                     visualize: bool = False,
                     verbose: bool = VERBOSE) -> Network:
    """
    Abstract the network layer by layer, from the output layer down to
    FIRST_ABSTRACT_LAYER, then finalize with finish_abstraction().

    :param network: Network to abstract (mutated in place)
    :param do_preprocess: run preprocess() before abstracting
    :param visualize: unused here; kept for interface compatibility
    :param verbose: forwarded to finish_abstraction()
    :return: the (same, mutated) abstracted Network
    """
    if VERBOSE:
        debug_print("original net:")
        print(network)
    if do_preprocess:
        preprocess(network)
    part2union = {}
    for layer_index in reversed(range(FIRST_ABSTRACT_LAYER,
                                      len(network.layers))):
        part2union = network.layers[layer_index].abstract(
            network.name2node_map, part2union)
        # update name2node_map - add new union nodes and remove inner nodes;
        # removal precedes addition for the equal-names case (e.g. output layer)
        for part_name in part2union:
            del network.name2node_map[part_name]
        network.generate_name2node_map()
    finish_abstraction(network, part2union, verbose=verbose)
    return network
Esempio n. 3
0
def preprocess(network: Network) -> None:
    """
    Pre-process the network in two stages: split to pos/neg, then to inc/dec.

    Afterwards zero-weight edges are filled in and a deep copy of the
    pre-processed layers / name map is stored on the network for later use
    (e.g. during refinement), together with generated weights and biases.

    :param network: Network before pre-processing (nodes without special types)
    :return: None; `network` is mutated in place
    """
    preprocess_split_pos_neg(network)
    preprocess_split_inc_dec(network)
    if VERBOSE:
        debug_print("after preprocess")
    fill_zero_edges(network)

    # keep a copy of the original pre-processed network for later use
    network.orig_layers = copy.deepcopy(network.layers)
    network.orig_name2node_map = copy.deepcopy(network.name2node_map)
    network.weights = network.generate_weights()
    network.biases = network.generate_biases()
Esempio n. 4
0
def preprocess_split_inc_dec(network: Network) -> None:
    """
    Split network nodes into increasing/decreasing nodes.

    Every layer after FIRST_INC_DEC_LAYER is processed from last to first;
    the layer at FIRST_INC_DEC_LAYER is then adjusted to feed the new nodes.
    """
    if VERBOSE:
        debug_print("preprocess_split_inc_dec()")
    for layer_index in reversed(range(FIRST_INC_DEC_LAYER + 1,
                                      len(network.layers))):
        network.layers[layer_index].split_inc_dec(network.name2node_map)
    network.generate_name2node_map()
    adjust_layer_after_split_inc_dec(network, layer_index=FIRST_INC_DEC_LAYER)
    if VERBOSE:
        debug_print("after preprocess_split_inc_dec()")
        print(network)
Esempio n. 5
0
def preprocess_split_pos_neg(network: Network) -> None:
    """
    Split network nodes into nodes with only positive/negative out-edges.

    All hidden layers are processed from last to first (the output layer and
    the layers up to FIRST_POS_NEG_LAYER are excluded); afterwards the layer
    at FIRST_POS_NEG_LAYER is adjusted to feed the split nodes.
    """
    if VERBOSE:
        debug_print("preprocess_split_pos_neg()")
    for layer_index in reversed(range(FIRST_POS_NEG_LAYER + 1,
                                      len(network.layers) - 1)):
        network.layers[layer_index].split_pos_neg(network.name2node_map)
    network.generate_name2node_map()
    adjust_layer_after_split_pos_neg(network, layer_index=FIRST_POS_NEG_LAYER)
Esempio n. 6
0
def one_experiment(nnet_filename, is_tiny, refinement_type, abstraction_type,
                   epsilon, lower_bound, preprocess_orig_net,
                   refinement_sequence_length, abstraction_sequence_length,
                   results_directory, property_id=consts.PROPERTY_ID,
                   verbose=consts.VERBOSE):
    """
    Run one CEGAR/CETAR abstraction-refinement experiment on an ACAS net.

    The network is abstracted (completely or heuristically), queried with
    Marabou, and refined on spurious counterexamples until the query is
    decided. Results are written both as a pandas dataframe and as a plain
    text file inside `results_directory`.

    :param nnet_filename: name of the .nnet file inside the examples dir
    :param is_tiny: use the tiny example nets directory and property
    :param refinement_type: "cegar" (example-guided) or "cetar"
    :param abstraction_type: "complete" or heuristic abstraction
    :param epsilon: recorded in the results filename
    :param lower_bound: recorded in the results filename
    :param preprocess_orig_net: preprocess the original net before comparing
    :param refinement_sequence_length: refinement steps per refine() call
    :param abstraction_sequence_length: abstraction steps for heuristic mode
    :param results_directory: directory where result files are written
    :param property_id: id of the property to verify
    :param verbose: print progress information
    :return: list of (key, value) result pairs
    """
    if verbose:
        debug_print("one_experiment_cegarabou({})".format(json.dumps([nnet_filename, is_tiny, refinement_type,
                                                                      abstraction_type, epsilon, lower_bound,
                                                                      property_id, preprocess_orig_net])))

    if is_tiny:
        example_nets_dir_path = consts.PATH_TO_MARABOU_ACAS_EXAMPLES
    else:
        example_nets_dir_path = consts.PATH_TO_MARABOU_APPLICATIONS_ACAS_EXAMPLES
    fullname = os.path.join(example_nets_dir_path, nnet_filename)

    if not os.path.exists(results_directory):
        os.makedirs(results_directory)
    results_filename = generate_results_filename(nnet_filename=nnet_filename,
                                                 is_tiny=is_tiny,
                                                 refinement_type=refinement_type,
                                                 abstraction_type=abstraction_type,
                                                 epsilon=epsilon,
                                                 lower_bound=lower_bound,
                                                 property_id=property_id,
                                                 preprocess_orig_net=preprocess_orig_net,
                                                 abstraction_sequence_length=abstraction_sequence_length,
                                                 refinement_sequence_length=refinement_sequence_length)
    test_property = get_test_property_tiny() if is_tiny else get_test_property_acas(property_id)
    net = network_from_nnet_file(fullname)
    print(f"size={len(net.layers)}")

    orig_net = copy.deepcopy(net)
    # BUGFIX: use the `preprocess_orig_net` parameter instead of the global
    # CLI namespace `args`, which silently ignored the value callers passed.
    if preprocess_orig_net:
        preprocess(orig_net)

    if verbose:
        print("query using AR")
    t2 = time.time()
    if abstraction_type == "complete":
        print("complete")
        net = abstract_network(net)
    else:
        print("heuristic")
        net = heuristic_abstract(network=net, test_property=test_property,
                                 sequence_length=abstraction_sequence_length)
    abstraction_time = time.time() - t2
    num_of_refine_steps = 0
    ar_times = []
    ar_sizes = []
    refine_sequence_times = []
    while True:  # CEGAR / CETAR method
        t4 = time.time()
        vars1, stats1, query_result = get_query(
            network=net, test_property=test_property,
            verbose=consts.VERBOSE
        )
        debug_print(f'query_result={query_result}')
        t5 = time.time()
        ar_times.append(t5 - t4)
        ar_sizes.append(net.get_general_net_data()["num_nodes"])
        if verbose:
            print("query time after A and {} R steps is {}".format(num_of_refine_steps, t5-t4))
        debug_print(net.get_general_net_data())
        if query_result == "UNSAT":
            # if always y'<3.99 then also always y<3.99
            if verbose:
                print("UNSAT (finish)")
            break
        if query_result == "SAT":
            if verbose:
                print("SAT (have to check example on original net)")
                print(vars1)
            debug_print(f'vars1={vars1}')
            st = time.time()
            orig_net_output = orig_net.evaluate(vars1)
            print("evaluate: {}".format(time.time() - st))
            st = time.time()
            orig_net.speedy_evaluate(vars1)
            print("speedy evaluate: {}".format(time.time() - st))
            nodes2variables, variables2nodes = orig_net.get_variables()
            # we got y'>3.99, check if also y'>3.99 for the same input
            if is_satisfying_assignment(network=orig_net,
                                        test_property=test_property,
                                        output=orig_net_output,
                                        variables2nodes=variables2nodes):
                if verbose:
                    print("property holds also in orig - SAT (finish)")
                break  # also counter example for orig_net
            else:
                t_cur_refine_start = time.time()
                if verbose:
                    print("property doesn't holds in orig - spurious example")
                num_of_refine_steps += 1
                if verbose:
                    print("refine step #{}".format(num_of_refine_steps))
                if refinement_type == "cegar":
                    debug_print("cegar")
                    net = refine(network=net,
                                 sequence_length=refinement_sequence_length,
                                 example=vars1)
                else:
                    debug_print("cetar")
                    net = refine(network=net,
                                 sequence_length=refinement_sequence_length)
                t_cur_refine_end = time.time()
                refine_sequence_times.append(t_cur_refine_end - t_cur_refine_start)

    t3 = time.time()

    # time to check property on net with marabou using CEGAR
    total_ar_time = t3 - t2
    if verbose:
        print("ar query time = {}".format(total_ar_time))

    # time to check property on the last network in CEGAR
    last_net_ar_time = t3 - t4
    if verbose:
        print("last ar net query time = {}".format(last_net_ar_time))

    res = [
        ("net name", nnet_filename),
        ("property_id", property_id),
        ("abstraction_time", abstraction_time),
        ("query_result", query_result),
        ("num_of_refine_steps", num_of_refine_steps),
        ("total_ar_query_time", total_ar_time),
        ("ar_times", json.dumps(ar_times)),
        ("ar_sizes", json.dumps(ar_sizes)),
        ("refine_sequence_times", json.dumps(refine_sequence_times)),
        ("last_net_data", json.dumps(net.get_general_net_data())),
        ("last_query_time", last_net_ar_time)
    ]
    # generate dataframe from result
    df = pd.DataFrame.from_dict({x[0]: [x[1]] for x in res})
    df.to_json(os.path.join(results_directory, "df_" + results_filename))
    # write result to output file
    # NOTE(review): this still reads the global `args` to record the CLI
    # parameters; it fails if the function is called without the CLI entry
    # point — consider passing the namespace in explicitly.
    with open(os.path.join(results_directory, results_filename), "w") as fw:
        fw.write("-"*80)
        fw.write("parameters:")
        fw.write("-"*80)
        fw.write("\n")
        for arg in vars(args):
            fw.write("{}: {}\n".format(arg, getattr(args, arg)))
        fw.write("+"*80)
        fw.write("results:")
        fw.write("+"*80)
        fw.write("\n")
        for (k,v) in res:
            fw.write("{}: {}\n".format(k,v))
    return res
Esempio n. 7
0
                        type=int)
    parser.add_argument("-d", "--results_directory",
                        dest="results_directory",
                        default=consts.results_directory)
    parser.add_argument("-v", "--verbose",
                        dest="verbose",
                        default=consts.VERBOSE,
                        action="store_true")
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    # Script entry point: parse the CLI arguments and run one experiment.
    args = parse_args()
    # run experiment
    # choose between the tiny example net and the full ACAS batch net;
    # the "{}" placeholder is filled with the requested net number below
    nnet_general_filename = "ACASXU_run2a_1_1_tiny_{}.nnet" if args.is_tiny \
        else "ACASXU_run2a_{}_batch_2000.nnet"
    one_exp_res = one_experiment(
        nnet_filename=nnet_general_filename.format(args.net_number),
        property_id=args.property_id,
        is_tiny=args.is_tiny,
        refinement_type=args.refinement_type,
        abstraction_type=args.abstraction_type,
        epsilon=args.epsilon,
        lower_bound=args.lower_bound,
        preprocess_orig_net=args.preprocess_orig_net,
        abstraction_sequence_length=args.abstraction_sequence_length,
        refinement_sequence_length=args.refinement_sequence_length,
        results_directory=args.results_directory)
    # print the (key, value) result pairs of the experiment
    debug_print(one_exp_res)
Esempio n. 8
0
def one_experiment(nnet_filename,
                   refinement_type,
                   abstraction_type,
                   mechanism,
                   refinement_sequence,
                   abstraction_sequence,
                   results_directory,
                   property_id=consts.PROPERTY_ID,
                   verbose=consts.VERBOSE):
    """
    Run one verification experiment on the given ACAS network.

    Depending on `mechanism`, the property is checked either with vanilla
    Marabou ("marabou") or with Marabou plus abstraction-refinement
    (a CEGAR/weight-based loop, any other value).

    :param nnet_filename: name of the .nnet file inside the examples dir
    :param refinement_type: "cegar" (example-guided) or weight-based
    :param abstraction_type: "complete", "heuristic_alg2" or
        "heuristic_random"; anything else raises NotImplementedError
    :param mechanism: "marabou" for a vanilla query, otherwise use AR
    :param refinement_sequence: refinement steps per refine() call
    :param abstraction_sequence: abstraction steps for heuristic modes
    :param results_directory: directory where the results file is written
    :param property_id: id of the property to verify
    :param verbose: print progress information
    :return: list of (key, value) result pairs
    """
    test_property = get_test_property_acas(property_id)
    # the Marabou flavor depends on the query type, so it has to be imported
    # dynamically before the modules below (which use it) can be imported
    dynamically_import_marabou(query_type=test_property["type"])
    from core.nnet.read_nnet import (network_from_nnet_file, network2rlv)
    from core.abstraction.naive import abstract_network
    from core.abstraction.alg2 import heuristic_abstract_alg2
    from core.abstraction.random_abstract import heuristic_abstract_random
    # from core.abstraction.clustering_abstract import \
    #     heuristic_abstract_clustering
    from core.utils.marabou_query_utils import reduce_property_to_basic_form, get_query
    from core.refinement.refine import refine

    example_nets_dir_path = consts.PATH_TO_MARABOU_APPLICATIONS_ACAS_EXAMPLES
    fullname = os.path.join(example_nets_dir_path, nnet_filename)

    if not os.path.exists(results_directory):
        os.makedirs(results_directory)
    results_filename = generate_results_filename(
        nnet_filename=nnet_filename,
        property_id=property_id,
        mechanism=mechanism,
        refinement_type=refinement_type,
        abstraction_type=abstraction_type,
        refinement_sequence=refinement_sequence,
        abstraction_sequence=abstraction_sequence)

    # for i in range(len(test_property["output"])):
    #     test_property["output"][i][1]["Lower"] = lower_bound

    net = network_from_nnet_file(fullname)
    # print(net)
    print(f"size={len(net.layers)}")
    # normalize the property so the query machinery only needs one basic form
    net, test_property = reduce_property_to_basic_form(
        network=net, test_property=test_property)

    # mechanism is vanilla marabou
    if mechanism == "marabou":
        print("query using vanilla Marabou")

        t0 = time.time()
        vars1, stats1, query_result = get_query(network=net,
                                                test_property=test_property,
                                                verbose=consts.VERBOSE)
        t1 = time.time()
        # time to check property on net with marabou
        marabou_time = t1 - t0
        if verbose:
            print(f"query time = {marabou_time}")

        res = [
            ("net_name", nnet_filename),
            ("property_id", property_id),
            ("query_result", query_result),
            ("orig_query_time", marabou_time),
            ("net_data", json.dumps(net.get_general_net_data())),
        ]
        saveResultToFile(results_directory, results_filename, res)
        return res

    else:
        # mechanism is marabou_with_ar
        # keep the original net for validating counterexamples later
        orig_net = copy.deepcopy(net)
        print("query using Marabou with AR")
        t2 = time.time()
        if abstraction_type == "complete":
            net = abstract_network(net)
        elif abstraction_type == "heuristic_alg2":
            net = heuristic_abstract_alg2(network=net,
                                          test_property=test_property,
                                          sequence_length=abstraction_sequence)
        elif abstraction_type == "heuristic_random":
            net = heuristic_abstract_random(
                network=net,
                test_property=test_property,
                sequence_length=abstraction_sequence)
        # elif abstraction_type == "heuristic_clustering":
        #     net = heuristic_abstract_clustering(
        #         network=net,
        #         test_property=test_property,
        #         sequence_length=abstraction_sequence
        #     )
        else:
            raise NotImplementedError("unknown abstraction")
        abstraction_time = time.time() - t2

        num_of_refine_steps = 0
        ar_times = []
        ar_sizes = []
        refine_sequence_times = []
        spurious_examples = []
        # main abstraction-refinement loop: query the abstract net, validate
        # SAT assignments on the original net, refine on spurious examples
        while True:  # CEGAR / CETAR method
            t4 = time.time()
            print(net.get_general_net_data())

            vars1, stats1, query_result = get_query(
                network=net,
                test_property=test_property,
                verbose=consts.VERBOSE)

            t5 = time.time()
            ar_times.append(t5 - t4)
            ar_sizes.append(net.get_general_net_data()["num_nodes"])
            # if verbose:
            print("query time after A and {} R steps is {}".format(
                num_of_refine_steps, t5 - t4))
            debug_print(net.get_general_net_data())
            if query_result == "UNSAT":
                print("+" * 100)
                # if always y'<3.99 then also always y<3.99
                if verbose:
                    print("UNSAT (finish)")
                break
            if query_result == "SAT":
                if verbose:
                    print("SAT (have to check example on original net)")
                    print(vars1)
                # debug_print(f'vars1={vars1}')
                # st = time.time()
                # orig_net_output = orig_net.evaluate(vars1)
                # print("evaluate: {}".format(time.time() - st))
                # st = time.time()
                orig_net_output = orig_net.speedy_evaluate(vars1)
                # print(f"orig_net_output={orig_net_output}")
                # print(f"orig_net.name2node_map={orig_net.name2node_map}")
                # print("speedy evaluate: {}".format(time.time() - st))
                nodes2variables, variables2nodes = orig_net.get_variables()
                # we got y'>3.99, check if also y'>3.99 for the same input
                if is_satisfying_assignment(network=orig_net,
                                            test_property=test_property,
                                            output=orig_net_output,
                                            variables2nodes=variables2nodes):
                    if verbose:
                        print("property holds also in orig - SAT (finish)")
                    break  # also counter example for orig_net
                else:
                    spurious_examples.append(vars1)
                    t_cur_refine_start = time.time()
                    if verbose:
                        print(
                            "property doesn't holds in orig - spurious example"
                        )
                    num_of_refine_steps += 1
                    if verbose:
                        print("refine step #{}".format(num_of_refine_steps))
                    # refine until all spurious examples are satisfied
                    # since all spurious examples are satisfied in the original
                    # network, the loop stops until net will be fully refined
                    refinement_sequences_counter = 0
                    while True:
                        refinement_sequences_counter += 1
                        # print(f"refinement_sequences_counter={refinement_sequences_counter}")
                        if refinement_type == "cegar":
                            debug_print("cegar")
                            net = refine(network=net,
                                         sequence_length=refinement_sequence,
                                         example=vars1)
                        else:
                            debug_print("weight_based")
                            net = refine(network=net,
                                         sequence_length=refinement_sequence)
                        # after refining, check if the current spurious example is
                        # already not a counter example (i.e. not satisfied in the
                        # refined network). stop if not satisfied, continue if yes
                        net_output = net.speedy_evaluate(vars1)
                        # print(f"net_output={net_output}")
                        # print(f"net.name2node_map={net.name2node_map}")
                        nodes2variables, variables2nodes = net.get_variables()
                        if not is_satisfying_assignment(
                                network=net,
                                test_property=test_property,
                                output=net_output,
                                variables2nodes=variables2nodes):
                            break
                    t_cur_refine_end = time.time()
                    refine_sequence_times.append(t_cur_refine_end -
                                                 t_cur_refine_start)

        t3 = time.time()

        # time to check property on net with marabou using CEGAR
        total_ar_time = t3 - t2
        if verbose:
            print("ar query time = {}".format(total_ar_time))

        # time to check property on the last network in CEGAR
        last_net_ar_time = t3 - t4
        if verbose:
            print("last ar net query time = {}".format(last_net_ar_time))

        res = [("net_name", nnet_filename), ("property_id", property_id),
               ("abstraction_time", abstraction_time),
               ("query_result", query_result),
               ("num_of_refine_steps", num_of_refine_steps),
               ("total_ar_query_time", total_ar_time),
               ("ar_times", json.dumps(ar_times)),
               ("ar_sizes", json.dumps(ar_sizes)),
               ("refine_sequence_times", json.dumps(refine_sequence_times)),
               ("last_net_data", json.dumps(net.get_general_net_data())),
               ("last_query_time", last_net_ar_time)]
        return res
Esempio n. 9
0
def one_experiment(nnet_filename,
                   is_tiny,
                   lower_bound,
                   preprocess_orig_net,
                   results_directory,
                   property_id=consts.PROPERTY_ID,
                   is_adversarial_property=False,
                   verbose=consts.VERBOSE):
    """
    Query the original (non-abstracted) network once with Marabou and record
    the result.

    Results are written both as a pandas dataframe and as a plain text file
    inside `results_directory`.

    :param nnet_filename: name of the .nnet file inside the examples dir
    :param is_tiny: use the tiny example nets directory and property
    :param lower_bound: recorded in the results filename
    :param preprocess_orig_net: preprocess the net before querying
    :param results_directory: directory where result files are written
    :param property_id: id of the property to verify
    :param is_adversarial_property: recorded in the results filename
    :param verbose: print progress information
    :return: list of (key, value) result pairs
    """
    if verbose:
        debug_print("one_experiment_marabou({})".format(
            json.dumps([
                nnet_filename, is_tiny, lower_bound, preprocess_orig_net,
                property_id, results_directory
            ])))
    if is_tiny:
        example_nets_dir_path = consts.PATH_TO_MARABOU_ACAS_EXAMPLES
    else:
        example_nets_dir_path = consts.PATH_TO_MARABOU_APPLICATIONS_ACAS_EXAMPLES
    fullname = os.path.join(example_nets_dir_path, nnet_filename)

    if not os.path.exists(results_directory):
        os.makedirs(results_directory)
    results_filename = generate_results_filename(
        nnet_filename=nnet_filename,
        is_tiny=is_tiny,
        lower_bound=lower_bound,
        preprocess_orig_net=preprocess_orig_net,
        property_id=property_id,
        is_adversarial_property=is_adversarial_property)
    test_property = get_test_property_tiny(
    ) if is_tiny else get_test_property_acas(property_id)
    net = network_from_nnet_file(fullname)

    orig_net = copy.deepcopy(net)
    # BUGFIX: use the `preprocess_orig_net` parameter instead of the global
    # CLI namespace `args`, which silently ignored the value callers passed.
    if preprocess_orig_net:
        preprocess(orig_net)

    # query original net
    if verbose:
        print("query orig_net")
    t0 = time.time()
    if verbose:
        debug_print("orig_net.get_general_net_data(): {}".format(
            orig_net.get_general_net_data()))
    vars1, stats1, query_result = get_query(
        network=orig_net,
        test_property=test_property,
        is_adversarial_property=consts.IS_ADVERSARIAL,
        verbose=consts.VERBOSE)
    t1 = time.time()
    # time to check property on net with marabou
    marabou_time = t1 - t0
    if verbose:
        print("orig_net query time ={}".format(marabou_time))

    res = [
        ("net name", nnet_filename),
        ("property_id", property_id),
        ("query_result", query_result),
        ("orig_query_time", marabou_time),
        ("net_data", json.dumps(orig_net.get_general_net_data())),
    ]
    # generate dataframe from result
    df = pd.DataFrame.from_dict({x[0]: [x[1]] for x in res})
    df.to_json(os.path.join(results_directory, "df_" + results_filename))
    # NOTE(review): the parameter dump below still reads the global `args`;
    # it fails if this function is called without the CLI entry point.
    with open(os.path.join(results_directory, results_filename), "w") as fw:
        fw.write("-" * 80)
        fw.write("parameters:")
        fw.write("-" * 80)
        fw.write("\n")
        for arg in vars(args):
            fw.write("{}: {}\n".format(arg, getattr(args, arg)))
        fw.write("+" * 80)
        fw.write("results:")
        fw.write("+" * 80)
        fw.write("\n")
        for (k, v) in res:
            fw.write("{}: {}\n".format(k, v))
    return res
Esempio n. 10
0
                        default=consts.IS_ADVERSARIAL,
                        type=bool)
    parser.add_argument("-l",
                        "--lower_bound",
                        dest="lower_bound",
                        default=25000,
                        type=int)
    parser.add_argument("-d",
                        "--results_directory",
                        dest="results_directory",
                        default=consts.results_directory)
    parser.add_argument("-v",
                        "--verbose",
                        dest="verbose",
                        default=consts.VERBOSE,
                        action="store_true")
    args = parser.parse_args()

    # run experiment
    nnet_general_filename = "ACASXU_run2a_1_1_tiny_{}.nnet" if args.is_tiny else "ACASXU_run2a_{}_batch_2000.nnet"
    res_one_exp = one_experiment(
        nnet_filename=nnet_general_filename.format(args.net_number),
        is_tiny=args.is_tiny,
        property_id=args.property_id,
        preprocess_orig_net=args.preprocess_orig_net,
        lower_bound=args.lower_bound,
        results_directory=args.results_directory,
        is_adversarial_property=args.is_adversarial_property,
        verbose=args.verbose)
    debug_print(res_one_exp)
Esempio n. 11
0
def one_experiment(nnet_filename, property_filename, is_tiny, refinement_type, abstraction_type, epsilon,
                   lower_bound, orig_net_query, preprocess_orig_net,
                   refinement_sequence_length, abstraction_sequence_length, results_directory):
    """
    Run one abstraction-refinement experiment, optionally querying the
    original network first for comparison.

    The network is abstracted (completely or heuristically), queried with
    Marabou, and refined on spurious counterexamples until the query is
    decided. Results are written to a text file inside `results_directory`.

    :param nnet_filename: name of the .nnet file inside the examples dir
    :param property_filename: file the test property is read from
    :param is_tiny: use the tiny example nets directory
    :param refinement_type: "cegar" (example-guided) or "cetar"
    :param abstraction_type: "complete" or heuristic abstraction
    :param epsilon: recorded in the results filename
    :param lower_bound: lower bound applied to every property output
    :param orig_net_query: also query the original (non-abstracted) net
    :param preprocess_orig_net: preprocess the original net before comparing
    :param refinement_sequence_length: refinement steps per refine() call
    :param abstraction_sequence_length: abstraction steps for heuristic mode
    :param results_directory: directory where the results file is written
    :return: list of (key, value) result pairs
    """
    debug_print("one_experiment({})".format(json.dumps([nnet_filename, is_tiny, refinement_type, abstraction_type,
                                                        epsilon, lower_bound, orig_net_query, preprocess_orig_net])))
    if is_tiny:
        example_nets_dir_path = consts.PATH_TO_MARABOU_ACAS_EXAMPLES
    else:
        example_nets_dir_path = consts.PATH_TO_MARABOU_APPLICATIONS_ACAS_EXAMPLES
    fullname = os.path.join(example_nets_dir_path, nnet_filename)

    if not os.path.exists(results_directory):
        os.makedirs(results_directory)
    results_filename = generate_results_filename(nnet_filename=nnet_filename,
                                                 property_filename=property_filename,
                                                 is_tiny=is_tiny,
                                                 refinement_type=refinement_type,
                                                 abstraction_type=abstraction_type,
                                                 epsilon=epsilon,
                                                 lower_bound=lower_bound,
                                                 orig_net_query=orig_net_query,
                                                 preprocess_orig_net=preprocess_orig_net,
                                                 abstraction_sequence_length=abstraction_sequence_length,
                                                 refinement_sequence_length=refinement_sequence_length)
    test_property = read_test_property(property_filename)
    for i in range(len(test_property["output"])):
        test_property["output"][i][1]["Lower"] = lower_bound
    net = network_from_nnet_file(fullname)
    orig_net = copy.deepcopy(net)
    # BUGFIX: use the parameters instead of the global CLI namespace `args`
    # here and below, which silently ignored the values callers passed.
    if preprocess_orig_net:
        orig_net.preprocess()

    # query original net
    if orig_net_query:
        print("query orig_net")
        t0 = time.time()
        debug_print("orig_net.get_general_net_data(): {}".format(orig_net.get_general_net_data()))
        vars1, stats1, query_result = orig_net.get_query(test_property)
        t1 = time.time()
        # time to check property on net with marabou
        marabou_time = t1 - t0
        print("orig_net query time ={}".format(marabou_time))
    else:
        marabou_time = None

    print("query using AR")
    t2 = time.time()
    if abstraction_type == "complete":
        net = net.abstract()
    else:
        net = net.heuristic_abstract(test_property=test_property, sequence_length=abstraction_sequence_length)
    abstraction_time = time.time() - t2
    num_of_refine_steps = 0
    ar_times = []
    ar_sizes = []
    refine_sequence_times = []
    while True:  # CEGAR / CETAR method
        t4 = time.time()
        vars1, stats1, query_result = net.get_query(test_property)
        t5 = time.time()
        ar_times.append(t5 - t4)
        ar_sizes.append(net.get_general_net_data()["num_nodes"])
        print("query time after A and {} R steps is {}".format(num_of_refine_steps, t5-t4))
        debug_print(net.get_general_net_data())
        if query_result == "UNSAT":
            # if always y'<3.99 then also always y<3.99
            print("UNSAT (finish)")
            break
        if query_result == "SAT":
            print("SAT (have to check example on original net)")
            print(vars1)
            orig_net_output = orig_net.evaluate(vars1)
            nodes2variables, variables2nodes = orig_net.get_variables()
            # we got y'>3.99, check if also y'>3.99 for the same input
            if orig_net.does_property_holds(test_property,
                                            orig_net_output,
                                            variables2nodes):
                print("property holds also in orig - SAT (finish)")
                break  # also counter example for orig_net
            else:
                t_cur_refine_start = time.time()
                print("property doesn't holds in orig - spurious example")
                num_of_refine_steps += 1
                print("refine step #{}".format(num_of_refine_steps))
                if refinement_type == "cegar":
                    net = net.refine(sequence_length=refinement_sequence_length, example=vars1)
                else:
                    net = net.refine(sequence_length=refinement_sequence_length)
                t_cur_refine_end = time.time()
                refine_sequence_times.append(t_cur_refine_end - t_cur_refine_start)

    t3 = time.time()

    # time to check property on net with marabou using CEGAR
    total_ar_time = t3 - t2
    print("ar query time = {}".format(total_ar_time))

    # time to check property on the last network in CEGAR
    last_net_ar_time = t3 - t4
    print("last ar net query time = {}".format(last_net_ar_time))

    res = [
        ("net name", nnet_filename),
        ("abstraction_time", abstraction_time),
        ("query_result", query_result),
        ("orig_query_time", marabou_time),
        ("num_of_refine_steps", num_of_refine_steps),
        ("total_ar_query_time", total_ar_time),
        ("ar_times", json.dumps(ar_times)),
        ("ar_sizes", json.dumps(ar_sizes)),
        ("refine_sequence_times", json.dumps(refine_sequence_times)),
        ("last_net_data", json.dumps(net.get_general_net_data())),
        ("last_query_time", last_net_ar_time)
    ]
    # NOTE(review): the parameter dump below still reads the global `args`;
    # it fails if this function is called without the CLI entry point.
    with open(os.path.join(results_directory, results_filename), "w") as fw:
        fw.write("-"*80)
        fw.write("parameters:")
        fw.write("-"*80)
        fw.write("\n")
        for arg in vars(args):
            fw.write("{}: {}\n".format(arg, getattr(args, arg)))
        fw.write("+"*80)
        fw.write("results:")
        fw.write("+"*80)
        fw.write("\n")
        for (k,v) in res:
            fw.write("{}: {}\n".format(k,v))
    return res
Esempio n. 12
0
                        default=refinement_type_default, choices=["cegar", "cetar"])
    parser.add_argument("-p", "--preprocess_orig_net", dest="preprocess_orig_net",
                        default=consts.COMPARE_TO_PREPROCESSED_NET, action="store_true")
    parser.add_argument("-o", "--orig_net_query", dest="orig_net_query", default=False, action="store_true")
    parser.add_argument("-e", "--epsilon", dest="epsilon", default=consts.EPSILON, type=float)
    parser.add_argument("-l", "--lower_bound", dest="lower_bound", default=25000, type=int)
    parser.add_argument("-as", "--abstraction_sequence_length", dest="abstraction_sequence_length", default=100,
                        type=int)
    parser.add_argument("-rs", "--refinement_sequence_length", dest="refinement_sequence_length", default=100,
                        type=int)
    parser.add_argument("-d", "--results_directory", dest="results_directory", default=consts.results_directory)
    parser.add_argument("-v", "--verbose", dest="verbose", default=consts.VERBOSE, action="store_true")
    args = parser.parse_args()

    # run experiment
    res = one_experiment(nnet_filename=args.nnet_filename,
                         property_filename=args.property_filename,
                         is_tiny=args.is_tiny,
                         refinement_type=args.refinement_type,
                         abstraction_type=args.abstraction_type,
                         epsilon=args.epsilon,
                         lower_bound=args.lower_bound,
                         orig_net_query=args.orig_net_query,
                         preprocess_orig_net=args.preprocess_orig_net,
                         abstraction_sequence_length=args.abstraction_sequence_length,
                         refinement_sequence_length=args.refinement_sequence_length,
                         results_directory=args.results_directory)
    debug_print(res)


Esempio n. 13
0
def split_back(network: Network, part: AnyStr) -> None:
    """
    Implement the refinement step: split back `part` from the union node it
    was grouped into, into a separated node.

    The union node is replaced by two new nodes in the same layer: one for
    `part` itself (with its original bias) and one holding all remaining
    parts (with the sum of their original biases). Incoming and outgoing
    edges are recomputed from the original network's weights.

    :param network: Network, refined in place
    :param part: Str, name of the original node that is part of the union
    """
    # assume that layer_index is in [2, ..., L-1] (L = num of layers)
    # node names encode the layer index as the 2nd "_"-separated token
    try:
        layer_index = int(part.split("_")[1])
    except (IndexError, ValueError) as err:
        # bug fix: the previous handler dropped into an interactive
        # IPython shell (debug leftover) and then crashed with NameError
        # on the undefined layer_index; fail fast with a clear error.
        raise ValueError(
            "split_back(): cannot parse layer index from part name "
            "{!r}".format(part)) from err

    layer = network.layers[layer_index]
    next_layer = network.layers[layer_index + 1]
    prev_layer = network.layers[layer_index - 1]

    part2node_map = network.get_part2node_map()
    union_node = network.name2node_map[part2node_map[part]]

    parts = union_node.name.split("+")
    other_parts = [p for p in parts if p != part]
    if not other_parts:
        # `part` is not grouped with anything else - nothing to split
        return

    # node holding only `part`, restored with its original bias
    part_node = ARNode(name=part,
                       ar_type=union_node.ar_type,
                       activation_func=union_node.activation_func,
                       in_edges=[],
                       out_edges=[],
                       bias=network.orig_name2node_map[part].bias)
    # the remaining union keeps the sum of its members' original biases
    bias = sum(network.orig_name2node_map[other_part].bias
               for other_part in other_parts)

    other_parts_node = ARNode(name="+".join(other_parts),
                              ar_type=union_node.ar_type,
                              activation_func=union_node.activation_func,
                              in_edges=[],
                              out_edges=[],
                              bias=bias)

    splitting_nodes = [part_node, other_parts_node]

    for splitting_node in splitting_nodes:
        # recompute outgoing edges towards the next layer
        for next_layer_node in next_layer.nodes:
            group_a = splitting_node.name.split("+")
            group_b = next_layer_node.name.split("+")
            out_edge_weight = calculate_weight_of_edge_between_two_part_groups(
                network=network, group_a=group_a, group_b=group_b)

            if out_edge_weight is not None:
                out_edge = Edge(splitting_node.name, next_layer_node.name,
                                out_edge_weight)
                splitting_node.out_edges.append(out_edge)
                next_layer_node.in_edges.append(out_edge)
        # recompute incoming edges from the previous layer
        for prev_layer_node in prev_layer.nodes:
            group_a = prev_layer_node.name.split("+")
            group_b = splitting_node.name.split("+")
            in_edge_weight = calculate_weight_of_edge_between_two_part_groups(
                network=network, group_a=group_a, group_b=group_b)
            if in_edge_weight is not None:
                in_edge = Edge(prev_layer_node.name, splitting_node.name,
                               in_edge_weight)
                splitting_node.in_edges.append(in_edge)
                prev_layer_node.out_edges.append(in_edge)
        layer.nodes.append(splitting_node)
        fill_zero_edges(network)
    network.remove_node(union_node, layer_index)
    network.generate_name2node_map()
Esempio n. 14
0
def planet_with_ar(json_content):
    """
    Verify a property with abstraction-refinement (AR), using the external
    planet solver as the underlying verifier.

    :param json_content: json string with the experiment parameters:
        refinement_type, abstract_type, refinement_sequence,
        abstraction_sequence, filepath (nnet file), planet (path to the
        solver binary), plus whatever generate_test_property() reads.
    :return: json string with the query result and AR statistics
    :raises NotImplementedError: on an unknown abstraction type
    """
    print(json_content)
    parameter = json.loads(json_content)
    refinement_type = parameter['refinement_type']
    abstraction_type = parameter['abstract_type']
    refinement_sequence = int(parameter['refinement_sequence'])
    abstraction_sequence = int(parameter['abstraction_sequence'])

    test_property = generate_test_property(parameter)

    net = network_from_nnet_file(parameter['filepath'])
    net, test_property = reduce_property_to_basic_form(
        network=net, test_property=test_property)

    # keep the original network to validate counterexamples against
    orig_net = copy.deepcopy(net)

    # Abstract
    t2 = time.time()
    if abstraction_type == "complete":
        net = abstract_network(net)
    elif abstraction_type == "heuristic_alg2":
        net = heuristic_abstract_alg2(network=net,
                                      test_property=test_property,
                                      sequence_length=abstraction_sequence)
    elif abstraction_type == "heuristic_random":
        net = heuristic_abstract_random(network=net,
                                        test_property=test_property,
                                        sequence_length=abstraction_sequence)
    else:
        raise NotImplementedError("unknown abstraction")
    abstraction_time = time.time() - t2

    network2rlv(net, test_property, "network.rlv")

    shell_cmd = "{} network.rlv".format(parameter['planet'])
    cmd = shlex.split(shell_cmd)

    num_of_refine_steps = 0
    ar_times = []
    ar_sizes = []
    refine_sequence_times = []
    spurious_examples = []

    while True:  # CEGAR / CETAR loop
        t4 = time.time()
        print(net.get_general_net_data())

        # run planet on the current (abstract) network; list argv with
        # shell=False avoids shell-injection via the configured path
        p = subprocess.Popen(cmd,
                             shell=False,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        res = out.decode(encoding='utf-8').strip().split('\n')
        query_result = res[0]

        # bug fix: record per-iteration statistics - these lists were
        # reported in the result but never populated (cf. verify_with_ar)
        ar_times.append(time.time() - t4)
        ar_sizes.append(net.get_general_net_data()["num_nodes"])

        if 'UNSAT' == query_result:
            break
        if 'SAT' == query_result:
            # extract the counterexample assignment (5 input variables,
            # one per output line starting at res[3])
            vars1 = {}
            for i in range(5):
                content = res[3 + i].split('/')[-1].strip()
                vars1[i] = float(content)

            orig_net_output = orig_net.speedy_evaluate(vars1)

            nodes2variables, variables2nodes = orig_net.get_variables()
            # we got y'>3.99, check if also y'>3.99 for the same input
            if is_satisfying_assignment(network=orig_net,
                                        test_property=test_property,
                                        output=orig_net_output,
                                        variables2nodes=variables2nodes):
                print("property holds also in orig - SAT (finish)")
                break  # also counter example for orig_net
            else:
                print("need to refine")
                spurious_examples.append(vars1)
                num_of_refine_steps += 1
                t_cur_refine_start = time.time()
                refinement_sequences_counter = 0
                # refine until the current spurious example is no longer
                # satisfied on the refined network
                while True:
                    refinement_sequences_counter += 1
                    if refinement_type == "cegar":
                        debug_print("cegar")
                        net = refine(network=net,
                                     sequence_length=refinement_sequence,
                                     example=vars1)
                    else:
                        debug_print("weight_based")
                        net = refine(network=net,
                                     sequence_length=refinement_sequence)

                    # update network file
                    network2rlv(net, test_property, "network.rlv")

                    net_output = net.speedy_evaluate(vars1)

                    nodes2variables, variables2nodes = net.get_variables()
                    # bug fix: check the REFINED network with ITS output.
                    # the original checked orig_net/orig_net_output, which
                    # never change across iterations, so this loop could
                    # spin forever (cf. the same check in verify_with_ar)
                    if not is_satisfying_assignment(
                            network=net,
                            test_property=test_property,
                            output=net_output,
                            variables2nodes=variables2nodes):
                        break
                t_cur_refine_end = time.time()
                refine_sequence_times.append(t_cur_refine_end -
                                             t_cur_refine_start)

    res = {
        "query_result": query_result,
        "num_of_refine_steps": str(num_of_refine_steps),
        "ar_times": str(ar_times),
        "ar_sizes": str(ar_sizes),
        "refine_sequence_times": str(refine_sequence_times),
    }
    return json.dumps(res)
Esempio n. 15
0
def verify_with_ar(abstract_net,
                   orig_net,
                   test_property,
                   refinement_type,
                   abstraction_type,
                   refinement_sequence,
                   abstraction_sequence,
                   verbose=consts.VERBOSE):
    """
    Run the CEGAR/CETAR verification loop on an already-abstracted network,
    using Marabou (imported dynamically) as the underlying verifier.

    :param abstract_net: abstracted network to query (deep-copied locally)
    :param orig_net: original network, used to validate counterexamples
    :param test_property: property to verify (deep-copied locally)
    :param refinement_type: "cegar" uses the counterexample to guide
        refinement; any other value refines weight-based
    :param abstraction_type: NOTE(review): unused in the visible body -
        possibly kept for signature symmetry with callers; confirm
    :param refinement_sequence: number of refinement steps per sequence
    :param abstraction_sequence: NOTE(review): unused in the visible body
    :param verbose: print progress details when True
    :return: json string with the query result and AR statistics
    """
    try:
        # mechanism is marabou_with_ar
        dynamically_import_marabou(query_type=test_property["type"])

        # work on copies so the caller's objects are never mutated
        net = copy.deepcopy(abstract_net)
        orig_net = copy.deepcopy(orig_net)
        test_property = copy.deepcopy(test_property)

        num_of_refine_steps = 0
        ar_times = []              # per-iteration query times
        ar_sizes = []              # per-iteration network sizes
        refine_sequence_times = []  # wall time of each refinement sequence
        spurious_examples = []     # counterexamples rejected by orig_net

        start = time.time()
        while True:  # CEGAR / CETAR method
            t4 = time.time()
            vars1, stats1, query_result = get_query(
                network=net,
                test_property=test_property,
                verbose=consts.VERBOSE)
            t5 = time.time()
            ar_times.append(t5 - t4)
            ar_sizes.append(net.get_general_net_data()["num_nodes"])
            # if verbose:
            print("query time after A and {} R steps is {}".format(
                num_of_refine_steps, t5 - t4))
            if query_result == "UNSAT":
                # if always y'<3.99 then also always y<3.99
                if verbose:
                    print("UNSAT (finish)")
                break
            if query_result == "SAT":
                if verbose:
                    print("SAT (have to check example on original net)")
                # evaluate the candidate counterexample on the original net
                orig_net_output = orig_net.speedy_evaluate(vars1)
                nodes2variables, variables2nodes = orig_net.get_variables()
                # we got y'>3.99, check if also y'>3.99 for the same input
                if is_satisfying_assignment(network=orig_net,
                                            test_property=test_property,
                                            output=orig_net_output,
                                            variables2nodes=variables2nodes):
                    if verbose:
                        print("property holds also in orig - SAT (finish)")
                    break  # also counter example for orig_net
                else:
                    # spurious: caused by over-abstraction, so refine
                    spurious_examples.append(vars1)
                    t_cur_refine_start = time.time()
                    if verbose:
                        print(
                            "property doesn't holds in orig - spurious example"
                        )
                    num_of_refine_steps += 1
                    if verbose:
                        print("refine step #{}".format(num_of_refine_steps))
                    # refine until all spurious examples are satisfied
                    # since all spurious examples are satisfied in the original
                    # network, the loop stops until net will be fully refined
                    refinement_sequences_counter = 0
                    while True:
                        refinement_sequences_counter += 1
                        # print(f"refinement_sequences_counter={refinement_sequences_counter}")
                        if refinement_type == "cegar":
                            debug_print("cegar")
                            net = refine(network=net,
                                         sequence_length=refinement_sequence,
                                         example=vars1)
                        else:
                            debug_print("weight_based")
                            net = refine(network=net,
                                         sequence_length=refinement_sequence)
                        # after refining, check if the current spurious example is
                        # already not a counter example (i.e. not satisfied in the
                        # refined network). stop if not satisfied, continue if yes
                        net_output = net.speedy_evaluate(vars1)
                        # print(f"net_output={net_output}")
                        # print(f"net.name2node_map={net.name2node_map}")
                        nodes2variables, variables2nodes = net.get_variables()
                        if not is_satisfying_assignment(
                                network=net,
                                test_property=test_property,
                                output=net_output,
                                variables2nodes=variables2nodes):
                            break
                    t_cur_refine_end = time.time()
                    refine_sequence_times.append(t_cur_refine_end -
                                                 t_cur_refine_start)

        t3 = time.time()
        consume = t3 - start
        # time to check property on the last network in CEGAR
        last_net_ar_time = t3 - t4
        if verbose:
            print("last ar net query time = {}".format(last_net_ar_time))

        res = {
            "query_result": query_result,
            "num_of_refine_steps": str(num_of_refine_steps),
            "ar_times": str(ar_times),
            "ar_sizes": str(ar_sizes),
            "refine_sequence_times": str(refine_sequence_times),
            "refinement_consume": str(consume)
        }
        return json.dumps(res)
    except Exception as e:
        # NOTE(review): broad except swallows all failures and (as far as
        # this chunk shows) returns None implicitly; the handler may
        # continue past this chunk - confirm before relying on the
        # return value
        print("exception occur", e)