Example #1
def test_mnist_cnn(FLAGS):
    (x_train, y_train, x_test, y_test) = load_mnist_data()

    x_test_batch = x_test[:FLAGS.batch_size]
    y_test_batch = y_test[:FLAGS.batch_size]

    data = x_test_batch.flatten('C')
    print('Client batch size from FLAG:', FLAGS.batch_size)

    port = 34000

    encrypt_str = 'encrypt' if FLAGS.encrypt_data else 'plain'
    client = pyhe_client.HESealClient(FLAGS.hostname, port, FLAGS.batch_size,
                                      {'input': (encrypt_str, data)})

    results = client.get_results()
    results = np.round(results, 2)

    y_pred_reshape = np.array(results).reshape(FLAGS.batch_size, 10)
    with np.printoptions(precision=3, suppress=True):
        print(y_pred_reshape)

    y_pred = y_pred_reshape.argmax(axis=1)
    print('y_pred', y_pred)
    y_true = y_test_batch.argmax(axis=1)

    correct = np.sum(np.equal(y_pred, y_true))
    acc = correct / float(FLAGS.batch_size)
    print('pred size', len(y_pred))
    print('correct', correct)
    print('Accuracy (batch size', FLAGS.batch_size, ') =', acc * 100., '%')
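The helper load_mnist_data is not shown in these snippets. Judging from how the labels are used (argmax over a one-hot axis), a minimal sketch along the following lines would be compatible with the zero-argument form used in this example (Example #2 uses a variant taking start_batch and batch_size); the exact preprocessing is an assumption.

import numpy as np
import tensorflow as tf


def load_mnist_data():
    # Hypothetical stand-in for the load_mnist_data helper used above.
    # Returns MNIST images scaled to [0, 1] and one-hot labels, which is what
    # the argmax-based accuracy check in the example expects.
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
    x_train = x_train.astype(np.float32)[..., np.newaxis] / 255.0
    x_test = x_test.astype(np.float32)[..., np.newaxis] / 255.0
    y_train = tf.keras.utils.to_categorical(y_train, 10)
    y_test = tf.keras.utils.to_categorical(y_test, 10)
    return x_train, y_train, x_test, y_test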
Example #2
def test_network(FLAGS):
    (x_train, y_train, x_test,
     y_test) = load_mnist_data(FLAGS.start_batch, FLAGS.batch_size)
    data = x_test.flatten("C")

    client = pyhe_client.HESealClient(
        FLAGS.hostname,
        FLAGS.port,
        FLAGS.batch_size,
        {FLAGS.tensor_name: (FLAGS.encrypt_data_str, data)},
    )

    results = np.round(client.get_results(), 2)

    y_pred_reshape = np.array(results).reshape(FLAGS.batch_size, 10)
    with np.printoptions(precision=3, suppress=True):
        print(y_pred_reshape)

    y_pred = y_pred_reshape.argmax(axis=1)
    print("y_pred", y_pred)

    correct = np.sum(np.equal(y_pred, y_test.argmax(axis=1)))
    acc = correct / float(FLAGS.batch_size)
    print("correct", correct)
    print("Accuracy (batch size", FLAGS.batch_size, ") =", acc * 100.0, "%")
Example #3
def perform_inference(test_data, test_data_labels, parameters):
    """
    Performs inference. Based on: https://github.com/IntelAI/he-transformer/blob/master/examples/MNIST/pyclient_mnist.py

    :param test_data: The test data.
    :param test_data_labels: The test labels.
    :param parameters: The parameters.
    :return: The number of correct predictions.
    """
    num_classes = test_data_labels.shape[1]
    test_data_flat = test_data.flatten("C")

    start_time = time.time()
    client = pyhe_client.HESealClient(
        parameters.hostname, parameters.port, parameters.batch_size, {
            parameters.tensor_name:
            (parameters.encrypt_data_str, test_data_flat)
        })

    print("Waiting for results.")
    prediction_scores = np.array(client.get_results()).reshape(
        parameters.batch_size, num_classes)
    end_time = time.time()
    print("Got predictions with shape {} in time: {}".format(
        prediction_scores.shape, end_time - start_time))
    correct_predictions = calculate_num_correct_predictions(
        prediction_scores, test_data_labels)
    num_samples = test_data_labels.shape[0]
    print('HE-Transformer: {}/{} Test set: Accuracy: ({:.4f})'.format(
        correct_predictions, num_samples, correct_predictions / num_samples))
    return correct_predictions
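calculate_num_correct_predictions is not included in the snippet. Given that test_data_labels is one-hot encoded (its second dimension is the class count), a minimal sketch consistent with the argmax comparison used in the other examples could be:

import numpy as np


def calculate_num_correct_predictions(prediction_scores, test_data_labels):
    # Hypothetical helper: count rows where the highest score matches the
    # one-hot label.
    y_pred = np.argmax(prediction_scores, axis=1)
    y_true = np.argmax(test_data_labels, axis=1)
    return int(np.sum(y_pred == y_true))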
Example #4
def run_client(FLAGS, data):
    port = FLAGS.port
    if isinstance(port, list) or isinstance(port, tuple):
        print("WARNING: list ports were passed. Only one should be passed.")
        port = port[0]  # only one port should be passed
    if FLAGS.batch_size > 1:
        raise ValueError('batch size > 1 not currently supported.')
    inference_start = time.time()
    client = pyhe_client.HESealClient(
        FLAGS.hostname,
        port,
        FLAGS.batch_size,
        # "import/input" is hard-coded here instead of FLAGS.tensor_name.
        {"import/input": (FLAGS.encrypt_data_str, data)},
    )
    print(f"data shape: {data.shape}")
    r_rstar = np.array(client.get_results())
    inference_end = time.time()
    print(f"Inference time: {inference_end - inference_start}s")
    with open(inference_times_name, 'a') as outfile:
        outfile.write(str(inference_end - inference_start))
        outfile.write('\n')
    print('r_rstar (r-r*): ', array_str(r_rstar))

    rstar = FLAGS.r_star
    if rstar is None:
        raise ValueError('r_star should be provided but was None.')

    r_rstar = round_array(x=r_rstar, exp=FLAGS.round_exp)
    print('rounded r_rstar (r-r*): ', array_str(r_rstar))
    print("Writing out logits file to txt.")
    with open(f'{out_client_name}{port}privacy.txt', 'w') as outfile:
        for val in r_rstar.flatten():
            outfile.write(f"{int(val)}\n")

    # do 2 party computation with each Answering Party
    msg = 'starting 2pc with Answering Party'
    print(msg)
    log_timing(stage='client:' + msg,
               log_file=FLAGS.log_timing_file)
    # completed = {port: False for port in flags.ports}
    max_t = time.time() + 100000
    while not os.path.exists(f"{out_final_name}{port}privacy.txt"):
        print(f'client starting 2pc with port: {port}')
        process = subprocess.Popen(
            ['./gc-emp-test/bin/argmax_1', '2', '12345',
             f'{out_client_name}{port}privacy.txt'])
        time.sleep(1)
        if time.time() > max_t:
            raise ValueError("Step 1' of protocol never finished. Issue.")
    log_timing(stage='client:finished 2PC',
               log_file=FLAGS.log_timing_file)
    return r_rstar, rstar
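round_array is also undefined here. The commented-out line in Example #14 below ((r_rstar * 2 ** FLAGS.round_exp).astype(np.int64)) suggests it scales the values by a power of two and converts them to integers so they can be used as fixed-point inputs to the garbled-circuit argmax step; whether the values are rounded or truncated is an assumption.

import numpy as np


def round_array(x, exp):
    # Assumed behaviour, based on the commented-out hint in Example #14:
    # scale by 2**exp and round to 64-bit integers.
    return np.round(np.asarray(x) * (2 ** exp)).astype(np.int64)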
Example #5
def main(FLAGS):
    data = (2, 4, 6, 8)

    batch_size = 1

    client = pyhe_client.HESealClient(
        FLAGS.hostname, FLAGS.port, batch_size,
        {"client_parameter_name": ("encrypt", data)})

    results = client.get_results()
    print("results", results)
Example #6
def main(FLAGS):
    data = (1, 2, 3, 4)

    port = 34000
    batch_size = 1

    client = pyhe_client.HESealClient(
        FLAGS.hostname, port, batch_size,
        {'client_parameter_name': ('encrypt', data)})

    results = client.get_results()

    print('results', results)
Example #7
def main(FLAGS):
    util.VAL_IMAGE_FLAGS = FLAGS

    imagenet_inference_labels = get_imagenet_inference_labels()
    imagenet_training_labels = get_imagenet_training_labels()
    assert (
        sorted(imagenet_training_labels) == sorted(imagenet_inference_labels))
    validation_nums = get_validation_labels(FLAGS)
    x_test = get_validation_images(FLAGS)
    validation_labels = imagenet_inference_labels[validation_nums]

    if FLAGS.batch_size < 10:
        print('validation_labels', validation_labels)

    (batch_size, width, height, channels) = x_test.shape
    print('batch_size', batch_size)
    print('width', width)
    print('height', height)
    print('channels', channels)

    x_test_flat = x_test.flatten(order='C')
    hostname = 'localhost'
    port = 34000

    if 'NGRAPH_COMPLEX_PACK' in os.environ:
        complex_packing = str2bool(os.environ['NGRAPH_COMPLEX_PACK'])
    else:
        complex_packing = False

    client = pyhe_client.HESealClient(FLAGS.hostname, port, batch_size,
                                      x_test_flat, complex_packing)

    while not client.is_done():
        time.sleep(1)
    results = client.get_results()

    imagenet_labels = get_imagenet_labels()
    results = np.array(results)

    if (FLAGS.batch_size == 1):
        top5 = results.argsort()[-5:]
    else:
        results = np.reshape(results, (FLAGS.batch_size, 1001))
        top5 = np.flip(results.argsort()[:, -5:], axis=1)

    preds = imagenet_labels[top5]
    print('validation_labels', validation_labels)
    print('top5', preds)

    util.accuracy(preds, validation_labels)
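str2bool, used here to parse the NGRAPH_COMPLEX_PACK environment variable, is another helper that is not shown; a common minimal sketch is:

def str2bool(value):
    # Hypothetical helper: interpret typical truthy strings from an
    # environment variable such as NGRAPH_COMPLEX_PACK.
    return value.lower() in ("yes", "true", "t", "y", "1")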
Example #8
def test_network(FLAGS):
    (x_train, y_train, x_test,
     y_test) = load_fhir_data(FLAGS.start_batch, FLAGS.batch_size)
    data = x_test.flatten("C")

    client = pyhe_client.HESealClient(
        FLAGS.hostname,
        FLAGS.port,
        FLAGS.batch_size,
        {FLAGS.tensor_name: (FLAGS.encrypt_data_str, data)},
    )
    results = np.array(client.get_results()).reshape(FLAGS.batch_size, 2)

    y_pred = np.argmax(results, 1)
    print("y_pred", y_pred)
Example #9
def main(FLAGS):
    data = (1, 2, 3, 4)

    port = 34000
    batch_size = 1

    client = pyhe_client.HESealClient(FLAGS.hostname, port, batch_size, data,
                                      False)

    while not client.is_done():
        time.sleep(1)

    results = client.get_results()

    print('results', results)
Example #10
def test_network(FLAGS):
    x_test = Dataset.get_user("dataset/npy/", FLAGS.user)
    data = x_test.flatten("C")

    client = pyhe_client.HESealClient(
        FLAGS.hostname,
        FLAGS.port,
        FLAGS.batch_size,
        {FLAGS.tensor_name: (FLAGS.encrypt_data_str, data)},
    )

    results = np.round(client.get_results(), 2)

    y_pred_reshape = np.array(results).reshape(FLAGS.batch_size, 9)
    with np.printoptions(precision=3, suppress=True):
        print(y_pred_reshape)

    y_pred = y_pred_reshape.argmax(axis=1)
    print("y_pred", y_pred)
Example #11
def test_mnist_cnn(FLAGS):
    (x_train, y_train, x_test, y_test) = load_mnist_data()

    batch_size = FLAGS.batch_size
    x_test_batch = x_test[:batch_size]
    y_test_batch = y_test[:FLAGS.batch_size]

    data = x_test_batch.flatten('C')
    print('Client batch size from FLAG:', batch_size)

    complex_packing = False
    if ('NGRAPH_COMPLEX_PACK' in os.environ):
        complex_packing = str2bool(os.environ['NGRAPH_COMPLEX_PACK'])

    hostname = 'localhost'
    port = 34000

    print('complex_packing?', complex_packing)

    client = pyhe_client.HESealClient(hostname, port, batch_size, data,
                                      complex_packing)

    print('Sleeping until client is done')
    while not client.is_done():
        time.sleep(1)

    results = client.get_results()
    results = np.round(results, 2)

    y_pred_reshape = np.array(results).reshape(batch_size, 10)
    with np.printoptions(precision=3, suppress=True):
        print(y_pred_reshape)

    y_pred = y_pred_reshape.argmax(axis=1)
    print('y_pred', y_pred)
    y_true = y_test_batch.argmax(axis=1)

    correct = np.sum(np.equal(y_pred, y_true))
    acc = correct / float(batch_size)
    print('pred size', len(y_pred))
    print('correct', correct)
    print('Accuracy (batch size', batch_size, ') =', acc * 100., '%')
Example #12
def main(FLAGS):
    imagenet_inference_labels = get_imagenet_inference_labels()
    imagenet_training_labels = get_imagenet_training_labels()
    assert (
        sorted(imagenet_training_labels) == sorted(imagenet_inference_labels))
    validation_nums = get_validation_labels(FLAGS)
    x_test = get_validation_images(FLAGS)
    validation_labels = imagenet_inference_labels[validation_nums]

    if FLAGS.batch_size < 10:
        print('validation_labels', validation_labels)

    (batch_size, width, height, channels) = x_test.shape
    print('batch_size', batch_size)
    print('width', width)
    print('height', height)
    print('channels', channels)

    x_test_flat = x_test.flatten(order='C')
    port = 34000

    client = pyhe_client.HESealClient(FLAGS.hostname, port, batch_size,
                                      {'input': ('encrypt', x_test_flat)})

    results = client.get_results()

    imagenet_labels = get_imagenet_labels()
    results = np.array(results)

    if (FLAGS.batch_size == 1):
        top5 = results.argsort()[-5:]
    else:
        results = np.reshape(results, (FLAGS.batch_size, 1001))
        top5 = np.flip(results.argsort()[:, -5:], axis=1)

    preds = imagenet_labels[top5]
    print('validation_labels', validation_labels)
    print('top5', preds)

    util.accuracy(preds, validation_labels)
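util.accuracy receives the top-5 predicted label strings per image and the ground-truth label strings. Its implementation is not shown; a hypothetical top-5 accuracy check compatible with that call could be:

import numpy as np


def accuracy(preds, validation_labels):
    # Hypothetical top-5 accuracy: each row of preds holds the five best label
    # strings for one image, validation_labels the ground-truth label string.
    preds = np.atleast_2d(preds)
    hits = sum(label in row for row, label in zip(preds, validation_labels))
    print("Top-5 accuracy:", hits / float(len(validation_labels)))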
Example #13
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
# ==============================================================================

import pyhe_client
import time

data = (1, 2, 3, 4)

hostname = 'localhost'
port = 34000
batch_size = 1

client = pyhe_client.HESealClient(hostname, port, batch_size, data, False)

while not client.is_done():
    time.sleep(1)

results = client.get_results()

print('results', results)
Example #14
def run_client(FLAGS, data=None, labels=None):
    if data is None:
        data = np.load(consts.input_data)
    if labels is None:
        labels = np.load(consts.input_labels)

    r_rstars = {}
    for i, port in enumerate(FLAGS.ports):

        client = pyhe_client.HESealClient(
            FLAGS.hostname,
            port,
            FLAGS.batch_size,
            {FLAGS.tensor_name: (FLAGS.encrypt_data_str, data)},
        )

        raw_results = np.array(client.get_results())
        print('raw results: ', array_str(raw_results))

        rstar = None
        if FLAGS.debug is True:
            if FLAGS.rstar is not None:
                raise Exception(
                    "Either debug or r_star or both flags have to be None.")
            raw_shape_0 = raw_results.shape[0]
            expected_shape_0 = 2 * FLAGS.batch_size
            if raw_shape_0 != expected_shape_0:
                raise Exception(
                    f'Expected r_star for each example in the batch '
                    f'and dim 0 size of the result: {expected_shape_0}'
                    f', but received result with dim 0 size:'
                    f' {raw_shape_0}')
            r_rstar = raw_results[:FLAGS.batch_size]
            rstar = raw_results[FLAGS.batch_size:]

        else:
            if FLAGS.rstar is None:
                rstar = None
            elif FLAGS.rstar == [-1.0]:
                raise Exception('We do not generate r_star in the client. '
                                'r_star provided is [-1.0].')
            if FLAGS.rstar is not None:
                rstar = np.array(FLAGS.rstar)
            r_rstar = raw_results

        print('r_rstar (r-r*): ', array_str(r_rstar))

        if FLAGS.round_exp:
            # r_rstar = (r_rstar * 2 ** FLAGS.round_exp).astype(np.int64)
            r_rstar = round_array(x=r_rstar, exp=FLAGS.round_exp)
            print('rounded r_rstar (r-r*): ', array_str(r_rstar))

        r_rstars[port] = r_rstar
        y_pred_reshape = np.array(r_rstar).reshape(FLAGS.batch_size, 10)

        y_labels = labels.argmax(axis=1)
        print("y_test: ", y_labels)

        y_pred = y_pred_reshape.argmax(axis=1)
        print("y_pred: ", y_pred)

        correct = np.sum(np.equal(y_pred, y_labels))
        acc = correct / float(FLAGS.batch_size)
        print("correct from original result: ", correct)
        print(
            "Accuracy original result (batch size", FLAGS.batch_size, ") =",
            acc * 100.0, "%")
        with open(f'{out_client_name}{port}privacy.txt', 'w') as outfile:
            for val in y_pred_reshape.flatten():
                outfile.write(f"{int(val)}\n")

        if rstar is not None:
            results_r = y_pred_reshape + rstar
            y_pred_r = results_r.argmax(axis=1)
            print('y_pred_r: ', y_pred_r)
            correct = np.sum(np.equal(y_pred_r, y_labels))
            acc = correct / float(FLAGS.batch_size)
            print("correct after adding r*: ", correct)
            print(
                "Accuracy after adding r* (batch size", FLAGS.batch_size, ") =",
                acc * 100.0, "%")

        print(port, "DONE----------------------")
        time.sleep(5)

    # do 2 party computation with each Answering Party
    print('starting 2pc')
    completed = {port: False for port in FLAGS.ports}
    n_parties = len(completed.keys())
    max_t = time.time() + 100000
    processes = []
    while sum(completed.values()) < n_parties and time.time() < max_t:
        for port in completed.keys():
            if not completed[port]:
                if not os.path.exists(f"{out_client_name}{port}privacy.txt"):
                    raise ValueError('something broke')
                out_server_file = f"{out_server_name}{port}privacy.txt"
                if os.path.exists(out_server_file):
                    if FLAGS.predict_labels_file is not None:
                        predict_labels_file = FLAGS.predict_labels_file + str(
                            port) + '.npy'
                        predict_labels = np.load(predict_labels_file)
                        check_rstar_file_stage1(
                            rstar_file=out_server_file,
                            r_rstar=r_rstars[port],
                            labels=predict_labels,
                            port=port,
                        )
                    print(f'client starting 2pc with port: {port}')
                    completed[port] = True
                    process = subprocess.Popen(
                        ['./gc-emp-test/bin/argmax_1', '2', '12345',
                         f'{out_client_name}{port}privacy.txt'])
                    process.wait()
                    processes.append(process)
                else:
                    print(
                        f'Expected output file {out_server_file} from the party {port} does not exist yet!')
    max_t = time.time() + 10000
    # wait for all argmax processes to finish (poll() returns None while running)
    while any(p.poll() is None for p in processes):
        time.sleep(1)
        if time.time() > max_t:
            raise ValueError(
                f'something broke while waiting on processes for 2pc with servers: {[p.poll() for p in processes]}')
    print("Prepping for 2pc with CSP")
    if not sum(completed.values()) == n_parties:
        raise ValueError('a 2pc with a server failed')

    r_rstars = []
    for port in FLAGS.ports:
        with open(f'output{port}privacy.txt', 'r') as infile:
            r_rstar = []
            for line in infile:
                r_rstar.append(int(line))
            r_rstars.append(r_rstar)
    r_rstars = np.array(r_rstars, np.int64)
    print(r_rstars)
    print('done')

    if FLAGS.final_call:
        fs = [f"output{port}privacy.txt" for port in FLAGS.ports]
        array_sum = csp.sum_files(fs)
        print(array_sum)
        with open("output.txt", 'w') as outfile:
            for v in array_sum.flatten():
                outfile.write(f'{v}\n')
        csp_filenames = [f'noise{port}privacy.txt' for port in FLAGS.ports]
        label = csp.get_histogram(
            client_filename='output.txt',
            csp_filenames=csp_filenames,
            csp_sum_filename='final.txt')
        print(label)
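The csp module used at the end of this example is not included either. From the way it is called, sum_files appears to element-wise add the integer vectors stored one value per line in each party's output file; a sketch under that assumption:

import numpy as np


def sum_files(filenames):
    # Assumed behaviour of csp.sum_files: each file holds one integer per line
    # (the format written by the clients above); all files must have the same
    # length. Returns their element-wise sum.
    arrays = [np.loadtxt(name, dtype=np.int64) for name in filenames]
    return np.sum(arrays, axis=0)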
Example #15
File: smart.py, Project: cschmidat/encquire
def test_network(FLAGS, xtest):
    """Encrypt data and send to inference server.

    FLAGS: Client flags.
    xtest: ndarray with EHR data.
    :return: float with mortality prediction
    """
    (x_train, y_train, x_test, y_test) = load_fhir_data(0, 1)
    x_test = xtest.to_numpy().astype("float32")
    data = x_test.flatten("C")
    print(data)
    print(FLAGS)
    client = pyhe_client.HESealClient(
        hostname,
        FLAGS.port,
        1,
        {FLAGS.tensor_name: ("encrypt", data)},
    )
    results = np.array(client.get_results()).reshape(FLAGS.batch_size, 2)
    print(results)

    y_pred = np.argmax(results, 1)
    print("y_pred", y_pred)
    return y_pred


app = Flask(__name__)

@app.route('/', methods=['GET'])