コード例 #1
0
#!/usr/bin/env python3
"""This module contains the CLI to interact with the client DB.

Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: [email protected]
"""
import logging
import sys

from lib import db_cli, config
from lib.base_client import UserType
from lib.logging import configure_root_loger

if __name__ == '__main__':  # pragma: no cover
    # Configure the root logger to write to <LOG_DIR>/client_db.log at
    # INFO level before any CLI work happens.
    configure_root_loger(logging.INFO, config.LOG_DIR + "client_db.log")
    log = logging.getLogger()
    # Delegate to the shared DB CLI, acting as a CLIENT-type user.
    db_cli.main(UserType.CLIENT, sys.argv[1:])
コード例 #2
0
"""
import argparse
import datetime
import logging
import sys
from typing import List

import lib.config as config
from lib.logging import configure_root_loger

# The compiled Cython wrapper for libPSI lives outside the normal module
# search path, so it must be added before the import below.
sys.path.append(config.WORKING_DIR + 'cython/psi')
# Python Version of libPSI
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSIReceiver  # noqa

# Import-time side effect: route all logging for this module to
# <LOG_DIR>/psi_receiver.log at INFO level.
configure_root_loger(logging.INFO, config.LOG_DIR + "psi_receiver.log")
log = logging.getLogger()


def main(args: List[str]) -> List[int]:
    """
    Start the PSI Receiver based on the given CL args.

    :param args: command line arguments (argv[1:])
    :return: List of received values
    """
    log.info("Starting PSI Receiver.")

    parser = argparse.ArgumentParser("PSI Receiver")
    # Positional argument: number of elements in the PSI set.
    parser.add_argument("SetSize",
                        help="Size of PSI Set.",
                        type=int,
コード例 #3
0
ファイル: __init__.py プロジェクト: COMSYS/parameter-exchange
def create_app(test_config=None,
               logging_level=config.LOGLEVEL,
               data_dir=config.DATA_DIR) -> Flask:
    """Factory function for flask app. Return a configured flask app object.

    :param test_config: optional mapping merged over the defaults; may also
        carry ``DATA_DIR`` to redirect all on-disk state (used in tests).
    :param logging_level: level handed to the root logger.
    :param data_dir: base directory for the database, logs and key material.
    :return: a fully configured :class:`Flask` application.
    """

    # Configure App
    app = Flask(__name__, instance_relative_config=True)
    redis_port = config.KEY_REDIS_PORT
    # A test config may redirect all on-disk state before paths are derived.
    if test_config is not None and 'DATA_DIR' in test_config:
        data_dir = test_config['DATA_DIR']
    log_dir = data_dir + 'logs/'
    app.config.from_mapping(
        REDIS_PORT=redis_port,
        CELERY_BROKER_URL=config.KEY_CELERY_BROKER_URL,
        CELERY_RESULT_BACKEND=config.KEY_CELERY_BROKER_URL,
        HASHKEY_LEN=config.HASHKEY_LEN,
        KEY_RANDOMIZE_PORTS=config.RANDOMIZE_PORTS,
        OT_HOST=config.OT_HOST,
        OT_TLS=config.OT_TLS,
        DATA_DIR=data_dir,
        SQLALCHEMY_DATABASE_URI=f"sqlite:///{data_dir}/{config.KEYSERVER_DB}",
        SQLALCHEMY_TRACK_MODIFICATIONS=False,
    )

    if test_config is not None:
        # load the test config if passed in
        app.config.from_mapping(test_config)

    # ensure the instance folder exists
    os.makedirs(app.instance_path, exist_ok=True)
    os.makedirs(data_dir, exist_ok=True)
    # Fix: the log directory was never created, so opening the log file
    # below could fail on a fresh data_dir. Harmless if it already exists
    # (or if configure_root_loger creates it itself — TODO confirm).
    os.makedirs(log_dir, exist_ok=True)

    # Update Logging with new values
    configure_root_loger(logging_level, log_dir + config.KEY_LOGFILE)

    # Update Celery
    celery_app.conf.update(app.config)

    # Update SQL Alchemy
    import key_server.key_database
    # noinspection PyUnresolvedReferences
    import lib.user_database
    # Needs to be imported so that table is created, too
    database.db.init_app(app)
    with app.app_context():
        database.db.create_all()

    # Include pages
    from key_server import main
    app.register_blueprint(main.bp)

    from key_server import client
    app.register_blueprint(client.bp)

    from key_server import provider
    app.register_blueprint(provider.bp)

    # Generate keys
    KeyServer(app.config['DATA_DIR'])

    if config.EVAL:
        print("************************************************************")
        print("Starting in Eval Mode!")
        print("************************************************************")

    return app
コード例 #4
0
import logging
import os
import pickle
import sys
import time
from typing import List, Tuple, Iterable

from memory_profiler import memory_usage

import lib.config as config
from lib.base_client import BaseClient, UserType, ServerType
from lib.helpers import parse_list, to_base64, print_time
from lib.logging import configure_root_loger
from lib.record import Record

# Import-time side effect: route all logging for this module to
# <LOG_DIR>/data_provider.log at INFO level.
configure_root_loger(logging.INFO, config.LOG_DIR + "data_provider.log")
log = logging.getLogger()


class DataProvider(BaseClient):
    """Data Provider Client for end users."""

    type = UserType.OWNER

    def _store_record_on_server(self, hash_val: bytes, ciphertext: dict,
                                owner: str) -> None:
        """
        Store the given record on the storage server.
        :param hash_val: [Bytes] Long hash of record as returned by records

        :param ciphertext: [Dict] encrypted record as returned by records
コード例 #5
0
# Constants -------------------------------------------------------------------
from lib.similarity_metrics import comp_offset_num, RelativeOffsetIterator
from .shared import lb

# Similarity metric under evaluation and the argument tuples to test it with.
METRIC = RelativeOffsetIterator
ARGS = [(10, )]  # [(i,) for i in range(1, 1001, 1)]
ROUNDS = 10  # written to the eval header as "Rounds" below
RECORD_ID_LENGTH = range(1, 100, 1)  # Value of Lego data.
RECORD_ROUNDING = [[3 for i in range(100)]]  # written to the eval header
RECORD_TOTAL_LENGTH = 100
POSITIVE_ONLY = False
RESUME = False
# All eval output goes below <EVAL_DIR>/metric/; create it eagerly.
DIRECTORY = config.EVAL_DIR + "metric" + "/"
os.makedirs(DIRECTORY, exist_ok=True)
# -----------------------------------------------------------------------------
# NOTE(review): unlike other call sites in this project, the return value
# of configure_root_loger is used here — presumably it returns the
# configured root logger; confirm against lib.logging.
log = configure_root_loger(logging.INFO, None)


def write_header(file_path: str, row_format: str) -> None:
    """Write eval header to file.

    Truncates ``file_path`` ('w' mode) and records the module-level eval
    configuration so result files are self-describing.

    :param file_path: destination file (overwritten).
    :param row_format: format of the data rows — presumably written
        further down; unused in the portion visible here (TODO confirm).
    """
    with open(file_path, 'w') as fd:
        fd.write("------------------------HEADER------------------------\n")
        fd.write(f"EVAL: Metric Eval\n")
        # get_metric_name() is defined elsewhere in this module.
        fd.write(f"Metric: {get_metric_name()}\n")
        fd.write(f"Metric Args: {str(ARGS)}\n")
        fd.write(f"Positive Only: {str(POSITIVE_ONLY)}\n")
        fd.write(f"Data source: Random Data\n")
        fd.write(f"Record Rounding: {RECORD_ROUNDING}\n")
        fd.write(f"Record ID Length: {RECORD_ID_LENGTH}\n")
        fd.write(f"Record Total Length: {RECORD_TOTAL_LENGTH}\n")
        fd.write(f"Rounds: {ROUNDS}\n")
コード例 #6
0
#!/usr/bin/env python3
"""This module contains the CLI to interact with the owner DB.

Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: [email protected]
"""
import logging
import sys

from lib import db_cli, config
from lib.base_client import UserType
from lib.logging import configure_root_loger

if __name__ == '__main__':  # pragma: no cover
    # Configure the root logger to write to <LOG_DIR>/owner_db.log at
    # INFO level before any CLI work happens.
    configure_root_loger(logging.INFO, config.LOG_DIR + "owner_db.log")
    log = logging.getLogger()
    # Delegate to the shared DB CLI, acting as an OWNER-type user.
    db_cli.main(UserType.OWNER, sys.argv[1:])
コード例 #7
0
ファイル: __init__.py プロジェクト: COMSYS/parameter-exchange
def create_app(test_config=None,
               logging_level=config.LOGLEVEL,
               data_dir=config.DATA_DIR) -> Flask:
    """Factory function for flask app. Return a configured flask app object.

    :param test_config: optional mapping merged over the defaults; may also
        carry ``DATA_DIR`` to redirect all on-disk state (used in tests).
    :param logging_level: level handed to the root logger.
    :param data_dir: base directory for the database and logs.
    :return: a fully configured :class:`Flask` application.
    """

    app = Flask(__name__, instance_relative_config=True)
    redis_port = config.STORAGE_REDIS_PORT
    # A test config may redirect all on-disk state before paths are derived.
    if test_config is not None and 'DATA_DIR' in test_config:
        data_dir = test_config['DATA_DIR']
    log_dir = data_dir + 'logs/'
    app.config.from_mapping(
        # SECURITY: hard-coded development secret; must be replaced with a
        # real secret before production use.
        SECRET_KEY='dev',  # TODO: Exchange
        REDIS_PORT=redis_port,
        CELERY_BROKER_URL=config.STORAGE_CELERY_BROKER_URL,
        CELERY_RESULT_BACKEND=config.STORAGE_CELERY_BROKER_URL,
        DATA_DIR=data_dir,
        PSI_HOST=config.PSI_HOST,
        PSI_TLS=config.PSI_TLS,
        RANDOMIZE_PORTS=config.RANDOMIZE_PORTS,
        SQLALCHEMY_DATABASE_URI=f"sqlite:///{data_dir}/{config.STORAGE_DB}",
        SQLALCHEMY_TRACK_MODIFICATIONS=False,
    )

    if test_config is not None:
        # load the test config if passed in
        app.config.from_mapping(test_config)

    os.makedirs(app.instance_path, exist_ok=True)
    os.makedirs(data_dir, exist_ok=True)
    # Fix: the log directory was never created, so opening the log file
    # below could fail on a fresh data_dir. Harmless if it already exists
    # (or if configure_root_loger creates it itself — TODO confirm).
    os.makedirs(log_dir, exist_ok=True)

    # Update Logging with new values
    configure_root_loger(logging_level, log_dir + config.STORAGE_LOGFILE)

    # Update celery
    celery_app.conf.update(app.config)

    # Update SQL Alchemy
    import storage_server.storage_database
    # noinspection PyUnresolvedReferences
    import lib.user_database
    # Needs to be imported so that table is created, too
    database.db.init_app(app)
    # For bloom filter
    from storage_server.connector import get_storageserver_backend
    with app.app_context():
        database.db.create_all()
        # Initialize Bloom Filter
        # NOTE(review): reaches into a private method of the backend;
        # consider exposing a public initializer instead.
        get_storageserver_backend()._initialize_bloom_filter()
    # Include pages
    from storage_server import main
    app.register_blueprint(main.bp)

    from storage_server import client
    app.register_blueprint(client.bp)

    from storage_server import provider
    app.register_blueprint(provider.bp)

    if config.EVAL:
        print("************************************************************")
        print("Starting in Eval Mode!")
        print("************************************************************")

    return app
コード例 #8
0
# Constants -------------------------------------------------------------------
LOGLVL = logging.INFO
TIME_PER_UPLOAD = 10  # s
TIME_OFFSET = 100
# All eval output goes below <EVAL_DIR>/provider/; create it eagerly.
DIRECTORY = config.EVAL_DIR + "provider" + "/"
os.makedirs(DIRECTORY, exist_ok=True)
DIFF = 100  # Maximal difference each entry of the records may have from target
ROUNDS = 10
NUM_UPLOADS = [1] + list(range(100, 1001, 100))
REC_LEN = 100
REC_ID_LEN = 10
REC_ROUND = [3 for _ in range(REC_ID_LEN)]
TLS = config.OT_TLS
MODE = "RANDOM"
log = logg.configure_root_loger(LOGLVL,
                                config.WORKING_DIR + 'data/dp_eval.log')
# Best-effort cleanup of the temp dir on interpreter exit
# (ignore_errors=True suppresses failures).
atexit.register(shutil.rmtree, config.TEMP_DIR, True)
# This eval requires TLS to be off for both OT and PSI.
if config.OT_TLS or config.PSI_TLS:
    # Fixed typo in the error message ("TLs" -> "TLS").
    raise RuntimeError("TLS should be disabled.")
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------


def write_header(file_path: str, row_fmt: str) -> None:
    """Write header into csv File.

    Truncates ``file_path`` ('w' mode) and records the module-level eval
    settings so result files are self-describing.

    :param file_path: destination csv file (overwritten).
    :param row_fmt: format of the data rows — presumably written further
        down; this chunk is truncated here (TODO confirm).
    """
    with open(file_path, 'w') as fd:
        fd.write("------------------------HEADER------------------------\n")
        fd.write(f"MODE: {MODE}\n")
        fd.write(f"TLS: {TLS}\n")
        # fd.write(f"Target: {TARGET}\n")
        fd.write(f"Num Uploads (% for Scenario Evals): {NUM_UPLOADS}\n")
        if MODE == "RANDOM":