def run_MLP(params, trows, vrows):
    """Train (and optionally test) an MLP on pre-loaded dataframes.

    :param params: hyper-parameter dict forwarded to the FCN runner.
    :param trows: training rows (dataframe).
    :param vrows: validation rows (dataframe).
    :return: tuple ``(1 - valid_acc, train_loss, train_auc, valid_loss)``
        produced by the training run.
    """
    cfg = config_reader.read_config(utils.abs_path_of("config/default.ini"))
    ckpt_dir = cfg.get_rel_path("PATHS", "checkpoint_dir")
    if not os.path.isdir(ckpt_dir):
        utils.mkdir_recursive(ckpt_dir)

    runner = mlp.FCNRunner(cfg, params)  # trows, vrows, test_rows, config)
    runner.bind_training_dataqueue_dataframe(trows, params)
    runner.bind_validation_dataqueue_dataframe(vrows)

    if "TEST" in cfg:
        test_path = cfg.get_rel_path("TEST", "test_file")
        with tf.name_scope("test_data"):
            # TODO check this with Yanli
            test_rows = csv_reader.read_csv_dataframe(test_path)
            runner.bind_test_dataqueue_dataframe(test_rows)

    runner.initialize()
    if "TRAINING" in cfg:
        # NOTE(review): if "TRAINING" were absent these four names would be
        # unbound at the return below — presumably TRAINING is always
        # configured; confirm against config/default.ini.
        valid_acc, train_loss, train_auc, valid_loss = \
            runner.run_training_dataframe(trows, vrows)
    if "TEST" in cfg:
        runner.run_test(test_rows)
    runner.close_session()
    return 1 - valid_acc, train_loss, train_auc, valid_loss
def get_run_path(self, run_name, prefix_dir='', name_postfix=''):
    """Build (and create if missing) an output directory, then return a
    unique run path inside it.

    :param run_name: base name of the run.
    :param prefix_dir: optional directory prepended to the generated path.
    :param name_postfix: suffix appended to the returned path (not used
        when probing for name collisions, matching the original contract).
    :return: ``<out_dir>/<run_name>[_N]<name_postfix>`` where ``_N`` is the
        first counter (starting at 2) that avoids an existing path.
    """
    import os
    out_dir = os.path.join(
        prefix_dir,
        self.datagen.name,
        'deb' if self.model.is_debug else 'rel',
        self.model.name,
        str(self.debug_samples) if self.is_debug else '',
    )
    # BUG FIX: the original tested os.path.isfile(out_dir), which is always
    # False for an existing *directory*, so mkdir was attempted every call.
    if not os.path.isdir(out_dir):
        utils.mkdir_recursive(out_dir)

    base_path = out_dir + '/' + run_name
    # BUG FIX: the original de-duplication did final_path[:-1] + str(i),
    # chopping exactly one trailing character per iteration; that corrupts
    # the name once the counter reaches two digits ('_10' -> '_111').
    # Rebuild the candidate from the base name on every iteration instead.
    final_path = base_path
    i = 2
    while os.path.exists(final_path):
        final_path = base_path + '_' + str(i)
        i += 1
    return final_path + name_postfix
def return_election(task):
    """Publish each session's public key and protInfo.xml to the public
    data area, then POST the finished-election payload to the callback URL
    using a client certificate and a CA list for verification.

    Python 2 code (print statements) — keep as-is until the module is
    ported.

    :param task: orchestra task whose input_data carries 'election_id'
        and 'session_ids'.
    """
    input_data = task.get_data()['input_data']
    election_id = input_data['election_id']
    session_ids = input_data['session_ids']
    election = db.session.query(Election)\
        .filter(Election.id == election_id).first()
    session_data = []
    for session_id in session_ids:
        # read into a string the pubkey
        privdata_path = app.config.get('PRIVATE_DATA_PATH', '')
        pubkey_path = os.path.join(privdata_path, str(election_id),
                                   session_id, 'publicKey_json')
        pubkey_file = open(pubkey_path, 'r')
        pubkey = pubkey_file.read()
        pubkey_file.close()
        session_data.append(dict(
            session_id=session_id,
            pubkey=json.loads(pubkey)
        ))

        # publish the pubkey
        pubdata_path = app.config.get('PUBLIC_DATA_PATH', '')
        pub_session_path = os.path.join(pubdata_path, str(election_id),
                                        session_id)
        pubkey_path2 = os.path.join(pub_session_path, 'publicKey_json')
        if not os.path.exists(pub_session_path):
            mkdir_recursive(pub_session_path)
        shutil.copyfile(pubkey_path, pubkey_path2)

        # publish protInfo.xml too
        session_privpath = os.path.join(privdata_path, str(election_id),
                                        session_id)
        protinfo_privpath = os.path.join(session_privpath, 'protInfo.xml')
        protinfo_pubpath = os.path.join(pub_session_path, 'protInfo.xml')
        shutil.copyfile(protinfo_privpath, protinfo_pubpath)

    # RejectAdapter is mounted on plain http — presumably it refuses
    # non-https callbacks; confirm against RejectAdapter's definition.
    session = requests.sessions.Session()
    session.mount('http://', RejectAdapter())
    callback_url = election.callback_url
    ret_data = {
        "status": "finished",
        "reference": {
            "election_id": election_id,
            "action": "POST /election"
        },
        "session_data": session_data
    }
    print "callback_url, ", callback_url
    print dumps(ret_data)
    # mutual TLS: CA list verifies the server, cert/key authenticate us
    ssl_calist_path = app.config.get('SSL_CALIST_PATH', '')
    ssl_cert_path = app.config.get('SSL_CERT_PATH', '')
    ssl_key_path = app.config.get('SSL_KEY_PATH', '')
    print("\nFF callback_url2 " + callback_url)
    r = session.request('post', callback_url, data=dumps(ret_data),
                        headers={'content-type': 'application/json'},
                        verify=ssl_calist_path,
                        cert=(ssl_cert_path, ssl_key_path))
    print r.text
    end_task()
def return_election(task):
    """Publish each session's public key and protInfo.xml to the public
    data area, then POST the finished-election payload to the callback URL.

    Python 2 code (print statements) — keep as-is until the module is
    ported.

    :param task: orchestra task whose input_data carries 'election_id'
        and 'session_ids'.
    """
    input_data = task.get_data()['input_data']
    election_id = input_data['election_id']
    session_ids = input_data['session_ids']
    election = db.session.query(Election)\
        .filter(Election.id == election_id).first()
    session_data = []
    for session_id in session_ids:
        # read into a string the pubkey
        privdata_path = app.config.get('PRIVATE_DATA_PATH', '')
        pubkey_path = os.path.join(privdata_path, str(election_id),
                                   session_id, 'publicKey_json')
        pubkey_file = open(pubkey_path, 'r')
        pubkey = pubkey_file.read()
        pubkey_file.close()
        session_data.append(
            dict(session_id=session_id, pubkey=json.loads(pubkey)))

        # publish the pubkey
        pubdata_path = app.config.get('PUBLIC_DATA_PATH', '')
        pub_session_path = os.path.join(pubdata_path, str(election_id),
                                        session_id)
        pubkey_path2 = os.path.join(pub_session_path, 'publicKey_json')
        if not os.path.exists(pub_session_path):
            mkdir_recursive(pub_session_path)
        shutil.copyfile(pubkey_path, pubkey_path2)

        # publish protInfo.xml too
        session_privpath = os.path.join(privdata_path, str(election_id),
                                        session_id)
        protinfo_privpath = os.path.join(session_privpath, 'protInfo.xml')
        protinfo_pubpath = os.path.join(pub_session_path, 'protInfo.xml')
        shutil.copyfile(protinfo_privpath, protinfo_pubpath)

    session = requests.sessions.Session()
    callback_url = election.callback_url
    ret_data = {
        "status": "finished",
        "reference": {
            "election_id": election_id,
            "action": "POST /election"
        },
        "session_data": session_data
    }
    print "callback_url, ", callback_url
    print dumps(ret_data)
    # NOTE(review): verify=False disables TLS certificate verification on
    # the callback POST — security risk if callback_url is not fully
    # trusted; confirm this is intentional (the sibling variant uses a CA
    # list plus client cert).
    r = session.request('post', callback_url, data=dumps(ret_data),
                        headers={'content-type': 'application/json'},
                        verify=False)
    print r.text
    end_task()
def run_MLP(params):
    """Build the training/validation input queues from the configured CSV
    files, train the MLP, and return the validation loss.

    :param params: hyper-parameter dict; 'batch_size' overrides the
        TRAINING batch size from the config file.
    :return: validation loss from iris_runner.run_training().
    """
    config = config_reader.read_config(utils.abs_path_of("config/default.ini"))
    # make sure the checkpoint directory exists before the runner uses it
    if not os.path.isdir(config.get_rel_path("PATHS", "checkpoint_dir")):
        utils.mkdir_recursive(config.get_rel_path("PATHS", "checkpoint_dir"))
    iris_runner = mlp.FCNRunner(config, params)  # trows, vrows, test_rows, config)
    if "TRAINING" in config:
        with tf.name_scope("train_data"):
            # batch size comes from the hyper-parameter search, not the config
            #train_batch_size = config.getint("TRAINING", "batch_size")
            train_batch_size = params['batch_size']
            stratified_task = config.get("TRAINING", "stratified_sampling",
                                         fallback="")
            trows = csv_reader.read_csv(
                config.get_rel_path("PATHS", "training_file"),
                train_batch_size, stratified_task, config)
        with tf.name_scope("validation_data"):
            vrows = csv_reader.read_csv(
                config.get_rel_path("PATHS", "validation_file"),
                config.getint("TRAINING", "validation_batch_size"))
        iris_runner.bind_training_dataqueue(trows, params)
        iris_runner.bind_validation_dataqueue(vrows)
    # test-queue binding is currently disabled
    '''
    if "TEST" in config:
        test_path = config.get_rel_path("TEST","test_file")
        with tf.name_scope("test_data"):
            test_rows = csv_reader.read_test_csv(test_path, int(config["TEST"]["batch_size"]))
        iris_runner.bind_test_dataqueue(test_rows)
    '''
    iris_runner.initialize()
    # NOTE(review): valid_loss is unbound if "TRAINING" is absent —
    # presumably TRAINING is always configured; confirm.
    if "TRAINING" in config:
        valid_loss = iris_runner.run_training()
    #if "TEST" in config:
        #iris_runner.run_test()
    return valid_loss
def execute(self):
    """Set up an election: create one mixnet session per question, then
    queue the generate-private-info, merge-protinfo and return-election
    subtasks on the orchestra queues.
    """
    task = self.task
    input_data = task.get_data()['input_data']
    election_id = input_data['election_id']
    election = db.session.query(Election)\
        .filter(Election.id == election_id).first()

    # 1. generate a session per question
    private_data_path = app.config.get('PRIVATE_DATA_PATH', '')
    election_private_path = os.path.join(private_data_path, str(election_id))
    stubs = []
    questions = json.loads(election.questions)
    for idx, _question in enumerate(questions):
        session_id = "%d-%s" % (idx, str(uuid.uuid4()))

        # create stub.xml inside this session's private directory
        session_privpath = os.path.join(election_private_path, session_id)
        mkdir_recursive(session_privpath)
        v_gen_protocol_info(session_id, str(election.id),
                            election.num_parties,
                            election.threshold_parties,
                            session_privpath)

        # read the stub file; its content is sent to all the authorities
        stub_path = os.path.join(session_privpath, 'stub.xml')
        with codecs.open(stub_path, 'r', encoding='utf-8') as stub_file:
            stub_content = stub_file.read()
        stubs.append(dict(id=session_id, stub=stub_content))

        db.session.add(Session(
            id=session_id,
            election_id=election_id,
            status='default',
            public_key='',
            question_number=idx,
        ))
    db.session.commit()

    # 2. generate private info and protocol info files on each authority
    # (and for each question/session). Each authority might require the
    # approval of the task by its operator.
    priv_info_task = SequentialTask()
    for authority in election.authorities:
        priv_info_task.add(SimpleTask(
            receiver_url=authority.orchestra_url,
            receiver_ssl_cert=authority.ssl_cert,
            action="generate_private_info",
            queue="orchestra_performer",
            data=dict(
                id=election_id,
                title=election.title,
                description=election.description,
                sessions=stubs,
                questions=election.questions,
                start_date=election.start_date,
                end_date=election.end_date,
                num_parties=election.num_parties,
                threshold_parties=election.threshold_parties,
                authorities=[a.to_dict() for a in election.authorities],
            )))
    task.add(priv_info_task)

    # 3. merge the outputs into protInfo.xml files, send them to the
    # authorities, and generate pubkeys sequentially one session after
    # the other
    task.add(SimpleTask(
        receiver_url=app.config.get('ROOT_URL', ''),
        action="merge_protinfo",
        queue="orchestra_director",
        data=dict(
            election_id=election_id,
            session_ids=[s['id'] for s in stubs],
        )))

    # 4. send protInfo.xml to the original sender (we have finished!)
    task.add(SimpleTask(
        receiver_url=app.config.get('ROOT_URL', ''),
        action="return_election",
        queue="orchestra_director",
        data=dict(
            election_id=election_id,
            session_ids=[s['id'] for s in stubs],
        )))
# Standard library
import os
import sys

# Third-party
import tensorflow as tf

# Project-local
import config_reader
import csv_reader
import mlp
import utils

# (fix: the original had a redundant second "import sys, os" line; os was
# already imported and multiple imports per line are non-idiomatic)

# Load configuration and make sure the checkpoint directory exists before
# the runner tries to write into it.
config = config_reader.read_config(utils.abs_path_of("config/default.ini"))
if not os.path.isdir(config.get_rel_path("PATHS", "checkpoint_dir")):
    utils.mkdir_recursive(config.get_rel_path("PATHS", "checkpoint_dir"))

iris_runner = mlp.FCNRunner(config)  # trows, vrows, test_rows, config)

if "TRAINING" in config:
    # build the training input queue from the configured CSV file
    with tf.name_scope("train_data"):
        train_batch_size = config.getint("TRAINING", "batch_size")
        stratified_task = config.get("TRAINING", "stratified_sampling",
                                     fallback="")
        trows = csv_reader.read_csv(
            config.get_rel_path("PATHS", "training_file"),
            train_batch_size, stratified_task, config)
    # build the validation input queue
    with tf.name_scope("validation_data"):
        vrows = csv_reader.read_csv(
            config.get_rel_path("PATHS", "validation_file"),
            config.getint("TRAINING", "validation_batch_size"))
def execute(self):
    """Set up an election: create one mixnet session per question, then
    queue the generate-private-info, merge-protinfo and return-election
    subtasks on the orchestra queues.
    """
    task = self.task
    input_data = task.get_data()['input_data']
    election_id = input_data['election_id']
    election = db.session.query(Election)\
        .filter(Election.id == election_id).first()

    # 1. generate a session per question
    private_data_path = app.config.get('PRIVATE_DATA_PATH', '')
    election_private_path = os.path.join(private_data_path, str(election_id))
    sessions = []
    questions = json.loads(election.questions)
    i = 0
    for question in questions:
        # session id embeds the question index for ordering
        session_id = "%d-%s" % (i, str(uuid.uuid4()))

        # create stub.xml
        session_privpath = os.path.join(election_private_path, session_id)
        mkdir_recursive(session_privpath)
        # l = ["vmni", "-prot", "-sid", session_id, "-name",
        # election.title, "-nopart", str(election.num_parties), "-thres",
        # str(election.threshold_parties)]
        #subprocess.check_call(l, cwd=session_privpath)
        v_gen_protocol_info(session_id, str(election.id),
                            election.num_parties,
                            election.threshold_parties,
                            session_privpath)

        # read stub file to be sent to all the authorities
        stub_path = os.path.join(session_privpath, 'stub.xml')
        stub_file = codecs.open(stub_path, 'r', encoding='utf-8')
        stub_content = stub_file.read()
        stub_file.close()
        sessions.append(dict(id=session_id, stub=stub_content))

        # persist the session row for this question
        session = Session(id=session_id,
                          election_id=election_id,
                          status='default',
                          public_key='',
                          question_number=i)
        db.session.add(session)
        i += 1
    db.session.commit()

    # 2. generate private info and protocol info files on each authority
    # (and for each question/session). Also, each authority might require
    # the approval of the task by its operator.
    priv_info_task = SequentialTask()
    for authority in election.authorities:
        subtask = SimpleTask(
            receiver_url=authority.orchestra_url,
            receiver_ssl_cert=authority.ssl_cert,
            action="generate_private_info",
            queue="orchestra_performer",
            data=dict(
                id=election_id,
                title=election.title,
                description=election.description,
                sessions=sessions,
                questions=election.questions,
                start_date=election.start_date,
                end_date=election.end_date,
                num_parties=election.num_parties,
                threshold_parties=election.threshold_parties,
                authorities=[a.to_dict() for a in election.authorities]))
        priv_info_task.add(subtask)
    task.add(priv_info_task)

    # 3. merge the outputs into protInfo.xml files, send them to the
    # authorities, and generate pubkeys sequentially one session after the
    # other
    merge_protinfo_task = SimpleTask(
        receiver_url=app.config.get('ROOT_URL', ''),
        action="merge_protinfo",
        queue="orchestra_director",
        data=dict(election_id=election_id,
                  session_ids=[s['id'] for s in sessions]))
    task.add(merge_protinfo_task)

    # 4. send protInfo.xml to the original sender (we have finished!)
    return_election_task = SimpleTask(
        receiver_url=app.config.get('ROOT_URL', ''),
        action="return_election",
        queue="orchestra_director",
        data=dict(election_id=election_id,
                  session_ids=[s['id'] for s in sessions]))
    task.add(return_election_task)
def return_election(task):
    """Publish each session's public key and protInfo.xml to the public
    data area, then POST the finished-election payload to the callback URL
    using a client certificate and a CA list for verification.

    :param task: orchestra task whose input_data carries 'election_id'
        and 'session_ids'.
    :raises Exception: re-raises any error from the callback POST after
        logging it.
    """
    input_data = task.get_data()['input_data']
    election_id = input_data['election_id']
    session_ids = input_data['session_ids']
    election = db.session.query(Election)\
        .filter(Election.id == election_id).first()
    session_data = []
    for session_id in session_ids:
        # read into a string the pubkey
        privdata_path = app.config.get('PRIVATE_DATA_PATH', '')
        pubkey_path = os.path.join(privdata_path, str(election_id),
                                   session_id, 'publicKey_json')
        # FIX: context manager guarantees the handle is closed even if
        # read() or the json parsing below raises
        with open(pubkey_path, 'r') as pubkey_file:
            pubkey = pubkey_file.read()
        session_data.append(
            dict(session_id=session_id, pubkey=json.loads(pubkey)))

        # publish the pubkey
        pubdata_path = app.config.get('PUBLIC_DATA_PATH', '')
        pub_session_path = os.path.join(pubdata_path, str(election_id),
                                        session_id)
        pubkey_path2 = os.path.join(pub_session_path, 'publicKey_json')
        if not os.path.exists(pub_session_path):
            mkdir_recursive(pub_session_path)
        shutil.copyfile(pubkey_path, pubkey_path2)

        # publish protInfo.xml too
        session_privpath = os.path.join(privdata_path, str(election_id),
                                        session_id)
        protinfo_privpath = os.path.join(session_privpath, 'protInfo.xml')
        protinfo_pubpath = os.path.join(pub_session_path, 'protInfo.xml')
        shutil.copyfile(protinfo_privpath, protinfo_pubpath)

    # RejectAdapter is mounted on plain http — presumably it refuses
    # non-https callbacks; confirm against RejectAdapter's definition.
    session = requests.sessions.Session()
    session.mount('http://', RejectAdapter())
    callback_url = election.callback_url
    ret_data = {
        "status": "finished",
        "reference": {
            "election_id": election_id,
            "action": "POST /election"
        },
        "session_data": session_data
    }
    print("callback_url, " + callback_url + ", data = ")
    print(dumps(ret_data))
    # mutual TLS: CA list verifies the server, cert/key authenticate us
    ssl_calist_path = app.config.get('SSL_CALIST_PATH', '')
    ssl_cert_path = app.config.get('SSL_CERT_PATH', '')
    ssl_key_path = app.config.get('SSL_KEY_PATH', '')
    try:
        r = session.request('post', callback_url, data=dumps(ret_data),
                            headers={'content-type': 'application/json'},
                            verify=ssl_calist_path,
                            cert=(ssl_cert_path, ssl_key_path))
    except Exception as e:
        print("exception posting callback = ")
        print(e)
        # FIX: bare raise preserves the original traceback (raise e would
        # rebind the exception and truncate the chain)
        raise
    print("received text = ")
    print(r.text)
    end_task()