Example #1
def test_using_gurobi(point, n, net_path):
    # pick the runner-up output index (second-highest score) as the competing class
    method = lambda x: np.argsort(x)[-2]
    gurobi_ptr = partial(GurobiSingleLayer, polyhedron_max_dim=1, use_relu=True, add_alpha_constraint=True,
                         use_counter_example=True)
    idx_max, other_idx = get_out_idx(point, n, net_path, method)
    print(idx_max, other_idx)
    res, queries_stats, alpha_history = adversarial_query(point, 0.01, idx_max, other_idx, net_path,
                                                          gurobi_ptr, n)
    assert res
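The test above takes the input point, the number of RNN iterations n, and a saved network path as parameters. A minimal driver might look like the sketch below; the input dimension, the model path, and the value of n are illustrative assumptions, not values from the original test suite.

import numpy as np

# hypothetical fixtures: a random input point and a placeholder model path
sample_point = np.random.uniform(-1.0, 1.0, 40)
model_path = "models/example_rnn.h5"  # placeholder, replace with a real saved model

test_using_gurobi(sample_point, n=5, net_path=model_path)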
Example #2
def test_multilayer_large_n():
    # point = np.array([1.0] * 40)
    point = points[-1]
    net_path = multi_layer_paths[1]
    n = 12
    gurobi_ptr = partial(GurobiMultiLayer,
                         polyhedron_max_dim=1,
                         use_relu=True,
                         add_alpha_constraint=True,
                         use_counter_example=True)
    method = lambda x: np.argsort(x)[-2]
    idx_max, other_idx = get_out_idx(point, n, net_path, method)
    res, _, _ = adversarial_query(point, 0.01, idx_max, other_idx, net_path,
                                  gurobi_ptr, n)
    assert res
Example #3
def test_specific_multilayer():
    point = points[3]
    net_path = multi_layer_paths[0]
    n = 2

    print(point)
    print(net_path)
    print("n=", n)
    gurobi_ptr = partial(GurobiMultiLayer,
                         polyhedron_max_dim=1,
                         use_relu=True,
                         add_alpha_constraint=True,
                         use_counter_example=True,
                         debug=True,
                         max_steps=1)
    method = lambda x: np.argsort(x)[-2]
    idx_max, other_idx = get_out_idx(point, n, net_path, method)
    res, _, _ = adversarial_query(point, 0.01, idx_max, other_idx, net_path,
                                  gurobi_ptr, n)
    assert res
Example #4
def run_exp_signle_time(points, radius, h5_file, t, only_rns=False, pbar=None, save_results=False):
    our_raw, rns_raw = [], []
    for j, point in enumerate(points):
        idx_max, other_idx = get_out_idx(point, t, h5_file, lambda x: np.argsort(x)[-2])
        rnsverify_time = rns_verify_query(h5_file, point, idx_max, other_idx, t, radius)

        our_time = -1
        if not only_rns:
            gurobi_ptr = partial(GurobiMultiLayer, use_relu=True, add_alpha_constraint=True,
                                 use_counter_example=True)
            try:
                start = timer()
                res, _, _ = adversarial_query(point, radius, idx_max, other_idx, h5_file, gurobi_ptr, t)
                our_time = timer() - start
            except ValueError:
                res = False
                our_time = -1
            assert res
        if pbar:
            pbar.update(1)

        our_raw.append(our_time)
        rns_raw.append(rnsverify_time)
        print('t: {}, point: {}, our: {}, rns: {}'.format(t, j, our_time, rnsverify_time))

    if save_results:
        exp_name = 'verification time as a function of iterations, one rnn cell over {} points, time: {}'.format(
            len(points), t)
        file_name = "rns_{}time{}_{}.pkl".format('' if only_rns else 'ours_', t, time.strftime("%Y%m%d-%H%M%S"))
        pickle_path = os.path.join(PICKLE_DIR, file_name)
        print("#" * 100)
        print(" " * 20 + "PICKLE PATH: {}".format(pickle_path))
        print("#" * 100)
        with open(pickle_path, "wb") as f:
            pickle.dump({'our_raw': our_raw, 'rns_raw': rns_raw, 'exp_name': exp_name}, f)

    return our_raw, rns_raw
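Given the signature above, the helper might be driven over a range of RNN iteration counts as sketched below; the model file, the input points, and the iteration range are illustrative assumptions, and tqdm supplies the optional progress bar as in Example #6.

import numpy as np
from tqdm import tqdm

h5_file = "models/example_rnn.h5"                               # placeholder model path
points = [np.random.uniform(-1.0, 1.0, 40) for _ in range(5)]   # hypothetical input points
t_range = range(2, 6)                                           # RNN iteration counts to test

pbar = tqdm(total=len(points) * len(t_range))
timings = {}
for t in t_range:
    # compare our verification time against RnsVerify for each iteration count
    our_raw, rns_raw = run_exp_signle_time(points, 0.01, h5_file, t, pbar=pbar, save_results=False)
    timings[t] = (our_raw, rns_raw)
pbar.close()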
Example #5
def test_temp():
    # build a small random Keras RNN + dense model and run a single adversarial query on it
    import tempfile
    import tensorflow.keras as k
    pass_counter = 0
    total_tests = 100
    rnn_dim = 4
    # the loop below is disabled; only a single random model is built and tested
    # for _ in range(total_tests):
    with tempfile.NamedTemporaryFile(suffix=".h5") as fp:
        net_path = fp.name
        model = k.Sequential()
        # model.add(k.layers.SimpleRNN(2, input_shape=(None, 1), activation='relu', return_sequences=True))
        model.add(k.layers.SimpleRNN(rnn_dim, input_shape=(None, 1), activation='relu', return_sequences=False))
        model.add(k.layers.Dense(2, activation='relu'))
        w_h = np.random.uniform(-0.5, 0.5, (rnn_dim, rnn_dim))
        w_in = np.random.random(rnn_dim)[None, :]
        b = np.random.random(rnn_dim)
        # w_h = np.array([[0, 1.0], [1., 0.]])

        # model.layers[0].set_weights([np.array([1.0, 1.0])[None, :], w_h, np.array([0., 0.])])
        model.layers[0].set_weights([w_in, w_h, b])
        w_in_1 = np.random.random((rnn_dim, 2))
        # model.layers[1].set_weights([np.array([[2.0, 0], [0, 1.0]]), np.array([0., 0.])])
        model.layers[1].set_weights([w_in_1, np.array([0., 0.])])

        model.save(net_path)

        point = np.array([1.0])
        # net_path = './FMCAD_EXP/models/model_20classes_rnn8_fc32_fc32_fc32_0050.ckpt'
        n = 3
        method = lambda x: np.argsort(x)[-2]
        idx_max, other_idx = get_out_idx(point, n, net_path, method)
        gurobi_ptr = partial(GurobiMultiLayer, polyhedron_max_dim=1, use_relu=True, add_alpha_constraint=True,
                             use_counter_example=True)
        res, _, _ = adversarial_query(point, 0.01, idx_max, other_idx, net_path, gurobi_ptr, n)
        pass_counter += res
Example #6
def run_all_experiments(net_options,
                        points,
                        t_range,
                        other_idx_method,
                        gurobi_ptr,
                        radius=0.01,
                        steps_num=1500,
                        save_results=True):
    # assert len(points) > 20
    results = defaultdict(list)
    if len(net_options) == 1:
        # derive a short network name (basename without extension) from the single model path
        net_name = ''.join(net_options[0].split('.')[:-1]).split('/')[-1]
    else:
        net_name = ''
    pickle_path = os.path.join(
        OUT_FOLDER,
        'gurobi' + str(datetime.now()).replace('.', '').replace(' ', '') +
        "{}.pkl".format(net_name))
    print("starting fresh experiment", "\n", "#" * 100)
    # previously computed results can be loaded into this dict to skip repeated queries
    partial_results = {}

    print("#" * 100, "\nwriting results to: {}".format(pickle_path), "\n",
          "#" * 100)
    counter = 0
    pbar = tqdm(total=len(other_idx_method) * len(points) * len(net_options) *
                len(t_range))
    for method in other_idx_method:
        for idx, point in enumerate(points):
            for path in net_options:
                if not os.path.exists(path):
                    path = os.path.join(MODELS_FOLDER, path)
                    if not os.path.exists(path):
                        raise FileNotFoundError(path)
                for t in t_range:
                    if counter < 0:
                        # counter can be set to a negative value above to skip the first
                        # |counter| queries (this branch is inert while counter starts at 0)
                        counter += 1
                        pbar.update(1)
                        have_point = True
                    else:
                        have_point = False
                        net_name = ''.join(path.split('.')[:-1]).split('/')[-1]
                        name = "{}_{}_{}".format(net_name, radius, t)

                        if name in partial_results:
                            for res in partial_results[name]:
                                # 'in_tesnor' also matches older result files that stored the point under a misspelled key
                                if not have_point and res['t'] == t and \
                                        (('in_tensor' in res and np.all(res['in_tensor'] == point)) or
                                         ('in_tesnor' in res and np.all(res['in_tesnor'] == point))):
                                    # already have this result
                                    pbar.update(1)
                                    results[name].append(res)
                                    have_point = True
                    if not have_point:
                        idx_max, other_idx = get_out_idx(
                            point, t, path, method)
                        net_name = ''.join(path.split('.')[:-1]).split('/')[-1]

                        result = run_experiment(point,
                                                radius,
                                                idx_max,
                                                other_idx,
                                                path,
                                                gurobi_ptr,
                                                t,
                                                steps=steps_num)
                        result.update({
                            'h5_file': net_name,
                            't': t,
                            'other_idx': other_idx,
                            'in_tensor': point,
                            'steps_num': steps_num
                        })
                        results[name].append(result)
                        if not result['result']:
                            print("FAIL on point index: {}".format(idx))
                        pbar.update(1)
                        if save_results:
                            with open(pickle_path, "wb") as f:
                                pickle.dump(results, f)
    if save_results:
        parse_results_file(pickle_path)
    return results
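The runner above can be invoked with a GurobiMultiLayer partial like the ones used in the earlier examples; in the sketch below the network list, the input points, and the iteration range are placeholders, not values from the original experiments.

import numpy as np
from functools import partial

nets = ["example_rnn.h5"]                                        # placeholder model file name(s)
points = [np.random.uniform(-1.0, 1.0, 40) for _ in range(25)]   # hypothetical input points
gurobi_ptr = partial(GurobiMultiLayer, polyhedron_max_dim=1, use_relu=True,
                     add_alpha_constraint=True, use_counter_example=True)

results = run_all_experiments(nets,
                              points,
                              t_range=range(2, 20),
                              other_idx_method=[lambda x: np.argsort(x)[-2]],
                              gurobi_ptr=gurobi_ptr,
                              radius=0.01,
                              save_results=False)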