Example #1
def abstract_network(network: Network,
                     do_preprocess: bool = True,
                     visualize: bool = False,
                     verbose: bool = VERBOSE) -> Network:
    if verbose:
        debug_print("original net:")
        print(network)
    if do_preprocess:
        preprocess(network)
    next_layer_part2union = {}
    for i in range(len(network.layers) - 1, FIRST_ABSTRACT_LAYER - 1, -1):
        layer = network.layers[i]
        next_layer_part2union = layer.abstract(network.name2node_map,
                                               next_layer_part2union)
        # update name2node_map - add new union nodes and remove inner nodes
        # removal precedes addition for the equal-names case (e.g. output layer)
        for part in next_layer_part2union.keys():
            del network.name2node_map[part]
        network.generate_name2node_map()
        # # print (i)
        # if visualize:
        #     title = "after layer {} test_abstraction".format(i)
        #     visualize_network(network_layers=network.layers,
        #                       title=title,
        #                       next_layer_part2union=next_layer_part2union,
        #                       debug=False)
        # if verbose:
        #     debug_print("net after abstract {}'th layer:".format(i))
        #     print(network)
    finish_abstraction(network, next_layer_part2union, verbose=verbose)
    return network
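For reference, a minimal usage sketch of abstract_network follows; the loader network_from_nnet_file and the file path are assumptions borrowed from Example #3, not part of the snippet above.

# Hypothetical usage sketch: assumes abstract_network and network_from_nnet_file
# (see Example #3) are imported from the surrounding project.
net = network_from_nnet_file("path/to/acas_net.nnet")  # placeholder path
abstract_net = abstract_network(net, do_preprocess=True,
                                visualize=False, verbose=False)
print(abstract_net.get_general_net_data())  # summary dict of the abstract net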
Example #2
def heuristic_abstract_random(network: Network,
                              test_property: dict,
                              do_preprocess: bool = True,
                              sequence_length: int = 50,
                              verbose: bool = VERBOSE) -> Network:
    """
    abstract a network until @test_property holds in the resulting abstract network
    the abstraction is done on @sequence_length pairs that are chosen randomly
    :param network: Network, the network to abstract
    :param test_property: Dict, represents the property to check
    :param do_preprocess: Bool, is preprocessing required before abstraction
    :param sequence_length: Int, number of abstraction steps in each sequence
    :param verbose: Bool, verbosity flag
    :return: Network, abstract network obtained from randomly chosen unions
    """
    if do_preprocess:
        preprocess(network)
    input_size = len(network.layers[0].nodes)
    # generate random inputs in the bound of the test property
    random_inputs = get_limited_random_inputs(input_size=input_size,
                                              test_property=test_property)
    # while no violation occurs, continue to abstract
    while not has_violation(network, test_property, random_inputs):
        # stop if the network is already fully abstracted (each hidden layer has at most 4 nodes)
        if all(len(layer.nodes) <= 4 for layer in network.layers[1:-1]):
            break
        # collect all couples of nodes of the same ar_type, in random order
        layer_couples = []
        for i, layer in enumerate(network.layers[FIRST_ABSTRACT_LAYER:-1]):
            layer_couples.extend(layer.get_couples_of_same_ar_type())
        random.shuffle(layer_couples)

        # union up to @sequence_length of these couples
        best_sequence_pairs = layer_couples
        cur_abstraction_seq_len = 0
        for pair in best_sequence_pairs:
            if cur_abstraction_seq_len >= sequence_length:
                break
            if pair[0].name not in network.name2node_map or \
                    pair[1].name not in network.name2node_map:
                continue
            cur_abstraction_seq_len += 1
            union_name = "+".join([pair[0].name, pair[1].name])
            union_couple_of_nodes(network, pair[0], pair[1])
            nl_p2u = {pair[0].name: union_name, pair[1].name: union_name}
            finish_abstraction(network=network,
                               next_layer_part2union=nl_p2u,
                               verbose=verbose)
    return network
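A hedged usage sketch for heuristic_abstract_random; the helpers and values below are placeholders taken from the other examples in this collection.

# Hypothetical usage sketch: assumes network_from_nnet_file and
# get_test_property_acas (see Example #3) are imported from the project;
# the path and property id are placeholders.
net = network_from_nnet_file("path/to/acas_net.nnet")
test_property = get_test_property_acas(1)
abstract_net = heuristic_abstract_random(network=net,
                                         test_property=test_property,
                                         sequence_length=100)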
Example #3
def one_experiment(nnet_filename, is_tiny, refinement_type, abstraction_type,
                   epsilon, lower_bound, preprocess_orig_net,
                   refinement_sequence_length, abstraction_sequence_length,
                   results_directory, property_id=consts.PROPERTY_ID,
                   verbose=consts.VERBOSE):
    if verbose:
        debug_print("one_experiment_cegarabou({})".format(json.dumps([nnet_filename, is_tiny, refinement_type,
                                                                      abstraction_type, epsilon, lower_bound,
                                                                      property_id, preprocess_orig_net])))

    if is_tiny:
        example_nets_dir_path = consts.PATH_TO_MARABOU_ACAS_EXAMPLES
    else:
        example_nets_dir_path = consts.PATH_TO_MARABOU_APPLICATIONS_ACAS_EXAMPLES
    fullname = os.path.join(example_nets_dir_path, nnet_filename)

    if not os.path.exists(results_directory):
        os.makedirs(results_directory)
    results_filename = generate_results_filename(nnet_filename=nnet_filename,
                                                 is_tiny=is_tiny,
                                                 refinement_type=refinement_type,
                                                 abstraction_type=abstraction_type,
                                                 epsilon=epsilon,
                                                 lower_bound=lower_bound,
                                                 property_id=property_id,
                                                 preprocess_orig_net=preprocess_orig_net,
                                                 abstraction_sequence_length=abstraction_sequence_length,
                                                 refinement_sequence_length=refinement_sequence_length)
    test_property = get_test_property_tiny() if is_tiny else get_test_property_acas(property_id)
    # for i in range(len(test_property["output"])):
    #     test_property["output"][i][1]["Lower"] = lower_bound
    net = network_from_nnet_file(fullname)
    print(f"size={len(net.layers)}")

    orig_net = copy.deepcopy(net)
    if preprocess_orig_net:
        preprocess(orig_net)

    if verbose:
        print("query using AR")
    t2 = time.time()
    if abstraction_type == "complete":
        print("complete")
        net = abstract_network(net)
    else:
        print("heuristic")
        net = heuristic_abstract(network=net, test_property=test_property,
                                 sequence_length=abstraction_sequence_length)
    abstraction_time = time.time() - t2
    num_of_refine_steps = 0
    ar_times = []
    ar_sizes = []
    refine_sequence_times = []
    while True:  # CEGAR / CETAR method
        t4 = time.time()
        vars1, stats1, query_result = get_query(
            network=net, test_property=test_property,
            verbose=consts.VERBOSE
        )
        debug_print(f'query_result={query_result}')
        t5 = time.time()
        ar_times.append(t5 - t4)
        ar_sizes.append(net.get_general_net_data()["num_nodes"])
        if verbose:
            print("query time after A and {} R steps is {}".format(num_of_refine_steps, t5-t4))
        debug_print(net.get_general_net_data())
        if query_result == "UNSAT":
            # if always y'<3.99 then also always y<3.99
            if verbose:
                print("UNSAT (finish)")
            break
        if query_result == "SAT":
            if verbose:
                print("SAT (have to check example on original net)")
                print(vars1)
            debug_print(f'vars1={vars1}')
            st = time.time()
            orig_net_output = orig_net.evaluate(vars1)
            print("evaluate: {}".format(time.time() - st))
            st = time.time()
            orig_net.speedy_evaluate(vars1)
            print("speedy evaluate: {}".format(time.time() - st))
            nodes2variables, variables2nodes = orig_net.get_variables()
            # the abstract net gave y'>3.99; check whether y>3.99 also holds
            # for the same input on the original net
            if is_satisfying_assignment(network=orig_net,
                                        test_property=test_property,
                                        output=orig_net_output,
                                        variables2nodes=variables2nodes):
                if verbose:
                    print("property holds also in orig - SAT (finish)")
                break  # also counter example for orig_net
            else:
                t_cur_refine_start = time.time()
                if verbose:
                    print("property doesn't holds in orig - spurious example")
                num_of_refine_steps += 1
                if verbose:
                    print("refine step #{}".format(num_of_refine_steps))
                if refinement_type == "cegar":
                    debug_print("cegar")
                    net = refine(network=net,
                                 sequence_length=refinement_sequence_length,
                                 example=vars1)
                else:
                    debug_print("cetar")
                    net = refine(network=net,
                                 sequence_length=refinement_sequence_length)
                t_cur_refine_end = time.time()
                refine_sequence_times.append(t_cur_refine_end - t_cur_refine_start)

    t3 = time.time()

    # time to check property on net with marabou using CEGAR
    total_ar_time = t3 - t2
    if verbose:
        print("ar query time = {}".format(total_ar_time))

    # time to check property on the last network in CEGAR
    last_net_ar_time = t3 - t4
    if verbose:
        print("last ar net query time = {}".format(last_net_ar_time))

    res = [
        ("net name", nnet_filename),
        ("property_id", property_id),
        ("abstraction_time", abstraction_time),
        ("query_result", query_result),
        ("num_of_refine_steps", num_of_refine_steps),
        ("total_ar_query_time", total_ar_time),
        ("ar_times", json.dumps(ar_times)),
        ("ar_sizes", json.dumps(ar_sizes)),
        ("refine_sequence_times", json.dumps(refine_sequence_times)),
        ("last_net_data", json.dumps(net.get_general_net_data())),
        ("last_query_time", last_net_ar_time)
    ]
    # generate dataframe from result
    df = pd.DataFrame.from_dict({x[0]: [x[1]] for x in res})
    df.to_json(os.path.join(results_directory, "df_" + results_filename))
    # write result to output file
    with open(os.path.join(results_directory, results_filename), "w") as fw:
        fw.write("-"*80)
        fw.write("parameters:")
        fw.write("-"*80)
        fw.write("\n")
        for arg in vars(args):
            fw.write("{}: {}\n".format(arg, getattr(args, arg)))
        fw.write("+"*80)
        fw.write("results:")
        fw.write("+"*80)
        fw.write("\n")
        for (k, v) in res:
            fw.write("{}: {}\n".format(k, v))
    return res
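A hypothetical invocation of this CEGAR/CETAR driver; every argument value is a placeholder, and the module-level args and consts objects that the function reads are assumed to be set up by the surrounding script.

# Hypothetical call sketch; all values are placeholders.
res = one_experiment(nnet_filename="ACASXU_run2a_1_1_batch_2000.nnet",
                     is_tiny=False,
                     refinement_type="cegar",        # or "cetar"
                     abstraction_type="heuristic",   # or "complete"
                     epsilon=1e-5,
                     lower_bound=25000,
                     preprocess_orig_net=True,
                     refinement_sequence_length=100,
                     abstraction_sequence_length=100,
                     results_directory="./results")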
Example #4
def one_experiment(nnet_filename,
                   is_tiny,
                   lower_bound,
                   preprocess_orig_net,
                   results_directory,
                   property_id=consts.PROPERTY_ID,
                   is_adversarial_property=False,
                   verbose=consts.VERBOSE):
    if verbose:
        debug_print("one_experiment_marabou({})".format(
            json.dumps([
                nnet_filename, is_tiny, lower_bound, preprocess_orig_net,
                property_id, results_directory
            ])))
    if is_tiny:
        example_nets_dir_path = consts.PATH_TO_MARABOU_ACAS_EXAMPLES
    else:
        example_nets_dir_path = consts.PATH_TO_MARABOU_APPLICATIONS_ACAS_EXAMPLES
    fullname = os.path.join(example_nets_dir_path, nnet_filename)

    if not os.path.exists(results_directory):
        os.makedirs(results_directory)
    results_filename = generate_results_filename(
        nnet_filename=nnet_filename,
        is_tiny=is_tiny,
        lower_bound=lower_bound,
        preprocess_orig_net=preprocess_orig_net,
        property_id=property_id,
        is_adversarial_property=is_adversarial_property)
    test_property = (get_test_property_tiny() if is_tiny
                     else get_test_property_acas(property_id))
    # for i in range(len(test_property["output"])):
    #     test_property["output"][i][1]["Lower"] = lower_bound
    net = network_from_nnet_file(fullname)

    orig_net = copy.deepcopy(net)
    if preprocess_orig_net:
        preprocess(orig_net)

    # query original net
    if verbose:
        print("query orig_net")
    t0 = time.time()
    if verbose:
        debug_print("orig_net.get_general_net_data(): {}".format(
            orig_net.get_general_net_data()))
    vars1, stats1, query_result = get_query(
        network=orig_net,
        test_property=test_property,
        is_adversarial_property=consts.IS_ADVERSARIAL,
        verbose=consts.VERBOSE)
    t1 = time.time()
    # time to check property on net with marabou
    marabou_time = t1 - t0
    if verbose:
        print("orig_net query time ={}".format(marabou_time))

    res = [
        ("net name", nnet_filename),
        ("property_id", property_id),
        ("query_result", query_result),
        ("orig_query_time", marabou_time),
        ("net_data", json.dumps(orig_net.get_general_net_data())),
    ]
    # generate dataframe from result
    df = pd.DataFrame.from_dict({x[0]: [x[1]] for x in res})
    df.to_json(os.path.join(results_directory, "df_" + results_filename))
    with open(os.path.join(results_directory, results_filename), "w") as fw:
        fw.write("-" * 80)
        fw.write("parameters:")
        fw.write("-" * 80)
        fw.write("\n")
        for arg in vars(args):
            fw.write("{}: {}\n".format(arg, getattr(args, arg)))
        fw.write("+" * 80)
        fw.write("results:")
        fw.write("+" * 80)
        fw.write("\n")
        for (k, v) in res:
            fw.write("{}: {}\n".format(k, v))
    return res
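For comparison, a hypothetical call to this plain-Marabou baseline, again with placeholder values only.

# Hypothetical call sketch for the baseline (no abstraction); values are placeholders.
res = one_experiment(nnet_filename="ACASXU_run2a_1_1_batch_2000.nnet",
                     is_tiny=False,
                     lower_bound=25000,
                     preprocess_orig_net=True,
                     results_directory="./results_marabou",
                     property_id=1)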
Example #5
def heuristic_abstract_alg2(network: Network,
                            test_property: dict,
                            do_preprocess: bool = True,
                            sequence_length: int = 50,
                            verbose: bool = VERBOSE) -> Network:
    """
    abstract a given network according to the alg2 method in the paper
    the abstraction is done on @sequence_length pairs chosen according to alg2
    in the paper: the pairs whose maximal input-edge weight difference is smallest
    :param network: Network, the network to abstract
    :param test_property: Dict, represents the property to check
    :param do_preprocess: Bool, is preprocessing required before abstraction
    :param sequence_length: Int, number of abstraction steps in each sequence
    :param verbose: Bool, verbosity flag
    :return: Network, abstract network according to alg 2 in the paper
    """
    if do_preprocess:
        preprocess(network)
    input_size = len(network.layers[0].nodes)
    # generate random inputs in the bound of the test property
    random_inputs = get_limited_random_inputs(input_size=input_size,
                                              test_property=test_property)
    nodes2edge_between_map = network.get_nodes2edge_between_map()
    # initial value ensures the loop's condition holds in the first iteration
    union_pairs = None
    loop_iterations = 0
    # while no violation occurs, continue to abstract
    while not has_violation(network, test_property,
                            random_inputs) and union_pairs != []:
        loop_iterations += 1
        # for each couple of nodes of the same ar_type, compute the maximal
        # difference between their input edges' weights; couples whose maximal
        # difference is smallest will be unioned first
        union_pairs = []
        for i, layer in enumerate(
                network.layers[FIRST_ABSTRACT_LAYER:-1]):  # iterate over the hidden layers
            layer_index = i + FIRST_ABSTRACT_LAYER
            prev_layer = network.layers[layer_index - 1]
            layer_couples = layer.get_couples_of_same_ar_type()
            # calc max difference between input edges' weights of couples
            for n1, n2 in layer_couples:
                max_diff_n1_n2 = INT_MIN
                for prev_node in prev_layer.nodes:
                    in_edge_n1 = nodes2edge_between_map.get(
                        (prev_node.name, n1.name), None)
                    a = (0 if in_edge_n1 is None else in_edge_n1.weight)
                    in_edge_n2 = nodes2edge_between_map.get(
                        (prev_node.name, n2.name), None)
                    b = (0 if in_edge_n2 is None else in_edge_n2.weight)
                    if abs(a - b) > max_diff_n1_n2:
                        max_diff_n1_n2 = abs(a - b)
                union_pairs.append(([n1, n2], max_diff_n1_n2))
        if union_pairs:
            # take the couples whose maximal difference is minimal
            best_sequence_pairs = sorted(union_pairs, key=lambda x: x[1])
            cur_abstraction_seq_len = 0
            for (pair, diff) in best_sequence_pairs:
                if cur_abstraction_seq_len >= sequence_length:
                    break
                if pair[0].name not in network.name2node_map or \
                        pair[1].name not in network.name2node_map:
                    continue
                cur_abstraction_seq_len += 1
                union_name = "+".join([pair[0].name, pair[1].name])
                union_couple_of_nodes(network, pair[0], pair[1])
                nl_p2u = {pair[0].name: union_name, pair[1].name: union_name}
                finish_abstraction(network=network,
                                   next_layer_part2union=nl_p2u,
                                   verbose=verbose)
    return network
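To make the pair-selection rule concrete, here is a self-contained toy sketch of the same "smallest maximal weight difference" criterion; the node names and weights are illustrative only.

# Toy illustration of the selection rule used above: score each candidate pair
# by the maximal absolute difference of corresponding input-edge weights, then
# prefer the pairs whose maximal difference is smallest.
candidate_pairs = {
    ("n1", "n2"): ([0.5, -0.2, 0.1], [0.4, -0.3, 0.1]),
    ("n3", "n4"): ([1.0, 0.0, 0.2], [0.1, 0.9, 0.2]),
}
scored = []
for pair, (w_a, w_b) in candidate_pairs.items():
    max_diff = max(abs(a - b) for a, b in zip(w_a, w_b))
    scored.append((pair, max_diff))
scored.sort(key=lambda x: x[1])  # most similar pairs first
print(scored)  # ("n1", "n2") is ranked first (max diff ~0.1)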