class MetaNode(object):
    """Facade over the node's metadata services.

    Combines schema lookups (delegated to a DBInfo built from *tables*)
    with timestamp/expiry services (delegated to an Oracle instance).
    """

    def __init__(self, tables):
        self.db_info = DBInfo(tables=tables)
        self.oracle = Oracle()

    def GetTableInfoByID(self, table_id):
        """Return table metadata looked up by numeric id."""
        return self.db_info.GetTableInfoByID(table_id)

    def GetTableInfoByName(self, table_name):
        '''
        @rtype: TableInfo
        '''
        return self.db_info.GetTableInfoByName(table_name)

    def GetColumnInfoByID(self, table_id, column_id):
        """Return column metadata looked up by numeric (table, column) ids."""
        return self.db_info.GetColumnInfoByID(table_id, column_id)

    def GetColumnInfoByName(self, table_name, column_name):
        '''
        @rtype: ColumnInfo
        '''
        return self.db_info.GetColumnInfoByName(table_name, column_name)

    def GetRowID(self, table_id):
        """Return a fresh row id for the given table."""
        return self.db_info.GetRowID(table_id)

    def GetTimestamp(self):
        """Return the oracle's current timestamp."""
        return self.oracle.GetTimestamp()

    def IsExpired(self, lockTimestamp, TTL):
        """True when *lockTimestamp* is older than *TTL* per the oracle."""
        return self.oracle.IsExpired(lockTimestamp, TTL)
def create_oracle(mpc, set_vrep, abs_frac, abs_err, rel_err):
    """
    Creates the optimization problem oracle.

    Parameters
    ----------
    mpc : MPC
        The control law to be used.
    set_vrep : np.array
        Vertex representation of the set to be partitioned (every row is a
        vertex).
    abs_frac : float, optional
        Fraction (0,1) away from the origin of the full size of the
        invariant set where to compute the absolute error.
    abs_err : float, optional
        Absolute error value. If provided, takes precedence over abs_frac.
    rel_err : float
        Relative error value.

    Returns
    -------
    oracle : Oracle
        Optimization problem oracle.
    """
    if abs_err is None:
        # No explicit absolute error: derive one by probing the cost at
        # each vertex scaled towards the origin by abs_frac.
        probe = Oracle(mpc, eps_a=1., eps_r=1.)
        scaled_vertices = [abs_frac * vertex for vertex in set_vrep]
        abs_err = np.max([probe.P_theta(theta=vertex)[2]
                          for vertex in scaled_vertices])
    return Oracle(mpc, eps_a=abs_err, eps_r=rel_err)
def main(args):
    """Generate an oracle dataset and dump it to CSV (plus a YAML config).

    Builds the dataset from the Oracle, converts the sample matrix to
    letter and integer-index representations, and writes the resulting
    DataFrame to ``args.output``.
    """
    oracle = Oracle(args)
    samples_dict = oracle.initializeDataset(save=False, returnData=True)
    scores = samples_dict["scores"]
    samples_mat = samples_dict["samples"]
    seq_letters = oracle.numbers2letters(samples_mat)
    # Integer-index form: concatenate the positive entries of each row.
    seq_ints = ["".join([str(el) for el in seq if el > 0])
                for seq in samples_mat]
    if isinstance(scores, dict):
        # Scores already carry multiple columns; extend them in place.
        scores.update({"letters": seq_letters, "indices": seq_ints})
        df = pd.DataFrame(scores)
    else:
        df = pd.DataFrame({
            "letters": seq_letters,
            "indices": seq_ints,
            "scores": scores
        })
    if args.output:
        # Persist the run configuration next to the CSV as YAML.
        output_yml = Path(args.output).with_suffix(".yml")
        with open(output_yml, "w") as f:
            yaml.dump(numpy2python(namespace2dict(args)), f,
                      default_flow_style=False)
    if args.no_indices:
        df.drop(columns="indices", inplace=True)
    df.to_csv(args.output)
def __init__(self, idclient=None, idcontrat=None, logger=None):
    """Initialize the contract reader and immediately fetch the active
    contract XML for (idclient, idcontrat)."""
    self.logger = logger or logging.getLogger(__name__)
    self.__oracle = Oracle()
    # Accumulates the XML fragments of the active contract.
    self.xml_contrat = []
    # Directory of this module, used to resolve bundled resources.
    self.current_dir = os.path.dirname(os.path.realpath(__file__))
    self.idclient = idclient
    self.idcontrat = idcontrat
    self.get_xml_active_contrat()
def test_nullifyTax(): oracle = Oracle() dic = { 'tax_code': '123456789012', 'tax_nbr': '000000100', 'oper_staff_id': 13, 'remark': '作废' } print nullifyTax(oracle, dic) oracle.commit()
def execute(self):
    """Run the full scraping pipeline and return the browser driver.

    Steps: show parameters, start the webdriver, log in and refine the
    search, loop over result pages, then either report model predictions
    ('browse' mode) or dump the raw features to disk.
    """
    self.output_params()

    # Start the browser.
    driver = DriverInitializer(debug=self.debug).get_driver()

    # Authenticate, then narrow the search.
    LoginAgent(driver, job_title=self.job_title,
               job_location=self.job_location).login()
    RefineAgent(driver, distance=self.distance,
                experience=self.experience,
                order_by_date=self.order_by_date).refine()

    # Walk the result pages.
    looper = PageLooper(driver, limit=self.limit, duration=self.duration,
                        date_limit=self.date_limit)
    looper.loop()

    # In non-debug runs the browser is no longer needed.
    if not self.debug:
        driver.quit()

    input_data, summaries, links = looper.get_all()

    if self.mode == 'browse':
        # Score the scraped entries and report them.
        output = Oracle().query(input_data)
        runtime_duration = round((time.time() - self.timer) / 60)
        Reporter().report(output, summaries, links, runtime_duration,
                          self.confidence_threshold)
    else:
        # Persist the raw entries, delimited for later parsing.
        with open(self.feature_path, 'w') as sink:
            for entry in input_data:
                sink.write(entry)
                sink.write('<<END>>\n\n')

    return driver
class UpdateXmlContrat(object):
    """class to update new Xml on Oracle database, with restarting contrat """

    def __init__(self, logger=None):
        # An admin-mode connection is required for the SITECENTRAL schema.
        self.logger = logger or logging.getLogger(__name__)
        self.__oracle = Oracle(mode="admin")
        self.__oracle._connect()
        # self.__oracle._close()

    def update_xml(self, new_zips):
        # Each entry of new_zips is expected to be a dict with at least the
        # keys 'xml' (new XML payload) and 'idcontrat' — TODO confirm shape
        # against the producer of new_zips.
        for nzip in new_zips:
            xml_content_clob = nzip.get('xml')
            # Bind the XML through an explicit CLOB variable: large payloads
            # cannot be passed as plain string binds.
            new_xml_clob = self.__oracle.cursor.var(cx_Oracle.CLOB)
            new_xml_clob.setvalue(0, xml_content_clob)
            idcontrat = nzip.get("idcontrat")
            idcontrat_var = self.__oracle.cursor.var(cx_Oracle.NUMBER)
            idcontrat_var.setvalue(0, idcontrat)
            # update PARAMETRERAPPLICATION
            self.__oracle.cursor.execute(
                """ UPDATE SITECENTRAL.PARAMETREAPPLICATION SET PARAMETRESAPPLICATION = :parametreapp WHERE IDCONTRAT = :idcontrat AND TYPEAPPLICATION = 41""",
                parametreapp=new_xml_clob,
                idcontrat=idcontrat_var)
            # Commit after each contract so a later failure does not roll
            # back earlier successful updates.
            self.__oracle.connection.commit()
            self.logger.info("Success : Update idcontrat %s !!" % idcontrat)
        # self.__oracle._close()

    def restart_contrat(self, idcontrat, idlogin, timeout=1):
        # Suspend, wait *timeout* seconds, then resume the contract.
        self.__suspend_contrat(idcontrat, idlogin)
        sleep(timeout)
        self.__resume_contrat(idcontrat, idlogin)

    def close(self):
        # Release the underlying database connection.
        self.__oracle._close()

    def __suspend_contrat(self, idcontrat, idlogin):
        # Bind arguments explicitly for the PL/SQL procedure call.
        idcontrat_var = self.__oracle.cursor.var(cx_Oracle.NUMBER)
        idcontrat_var.setvalue(0, idcontrat)
        idlogin_var = self.__oracle.cursor.var(cx_Oracle.STRING)
        idlogin_var.setvalue(0, idlogin)
        try:
            self.logger.info("-> (-) Suspend contrat : %s" % idcontrat)
            self.__oracle.cursor.callproc("APICONTRAT.SuspendreContrat",
                                          [idcontrat_var, idlogin_var])
        # Python 2 except syntax; a failed suspend is logged, not raised.
        except Exception, ex:
            self.logger.error(
                "Error: during suspending contrat : %s, cause: %s" %
                (idcontrat, ex))
def __init__(self, config=None, mysql=None, db_migrate=None):
    """Wire up the migration runner.

    Chooses a database driver from config["db_engine"] unless one is
    injected via *mysql*, and builds a SimpleDBMigrate unless *db_migrate*
    is provided.
    """
    self.cli = CLI()
    self.config = config or {}
    self.sgdb = mysql
    if self.sgdb is None and not self.config.get("new_migration"):
        # BUG FIX: the original compared with `is 'mysql'` / `is 'oracle'`,
        # which tests object identity and only matched by accident of
        # CPython string interning. Use equality.
        engine = self.config.get("db_engine")
        if engine == 'mysql':
            from mysql import MySQL
            self.sgdb = MySQL(config)
        elif engine == 'oracle':
            from oracle import Oracle
            self.sgdb = Oracle(config)
    self.db_migrate = db_migrate or SimpleDBMigrate(config)
def main():
    """MAC a message through the oracle and verify the resulting tag.

    Uses a fixed 32-byte message when no file argument is given, otherwise
    reads the message from the file named in argv[1].
    """
    if len(sys.argv) < 2:
        data = bytearray(
            'I, the server, hereby agree that I will pay $100 to this student.'
            .encode('ascii'))
        data = data[:32]
    else:
        # BUG FIX: use a context manager so the file is closed even if
        # read() raises.
        with open(sys.argv[1]) as data_file:
            data = data_file.read()
    # BUG FIX: create/connect the oracle *before* the try block. In the
    # original, if Oracle() raised, the finally clause referenced the
    # unbound local `oracle` and masked the real error with a NameError.
    oracle = Oracle()
    oracle.connect()
    try:
        tag = oracle.mac(data, len(data))
        ret = oracle.vrfy(data, len(data), tag)
        print(ret)
        if ret == 1:
            print("Message verified successfully!")
        else:
            print("Message verification failed.")
    finally:
        oracle.disconnect()
def run(self):
    """Seven 10/10 minions versus seven 1/1s must be a guaranteed win
    for player one with no lethal and no draws."""
    strong_board = [Minion(10, 10) for _ in range(7)]
    weak_board = [Minion(1, 1) for _ in range(7)]
    oracle = Oracle(strong_board, weak_board)
    result = oracle.calculate_game_result_density()
    self.assertEqual(result.player_one_win_percentage, 100)
    self.assertEqual(result.player_two_win_percentage, 0)
    self.assertEqual(result.lethal_one, 0)
    self.assertEqual(result.lethal_two, 0)
    self.assertEqual(result.draw, 0)
def __init__(self, config, sgdb=None):
    """Validate configuration and select the database driver.

    A driver may be injected via *sgdb* (used by tests); otherwise one is
    chosen from config["database_engine"]. Unknown engines raise.
    """
    Main._check_configuration(config)
    self.cli = CLI()
    self.config = config
    self.log = LOG(self.config.get("log_dir", None))
    self.sgdb = sgdb
    if self.sgdb is None and not self.config.get("new_migration", None):
        engine = self.config.get("database_engine")
        if engine == 'mysql':
            from mysql import MySQL
            self.sgdb = MySQL(config)
        elif engine == 'oracle':
            from oracle import Oracle
            self.sgdb = Oracle(config)
        elif engine == 'mssql':
            from mssql import MSSQL
            self.sgdb = MSSQL(config)
        elif engine == 'postgresql':
            from postgresql import PostgreSQL
            self.sgdb = PostgreSQL(config)
        else:
            raise Exception("engine not supported '%s'" % engine)
    self.db_migrate = SimpleDBMigrate(self.config)
def train(self, trees, batch_size=64, epochs=1000, dropout_keep_prob=0.5):
    """Train the model on oracle transitions extracted from *trees*.

    Each tree is expanded into (word, tag, label, expected) examples via
    the Oracle; training then runs for *epochs* mini-batches, printing the
    batch accuracy every 100 epochs.
    """
    examples = []
    for tree_index, tree in enumerate(trees):
        if tree_index % 500 == 0:
            # Progress marker on stderr so it doesn't pollute stdout.
            print("Tree", tree_index, file=sys.stderr)
        examples.extend(Oracle(tree, self))

    draw_batch = self._to_batches(examples)
    for epoch in range(epochs):
        batch = draw_batch(batch_size)
        feed = {
            self.input_words: batch[0],
            self.input_tags: batch[1],
            self.input_labels: batch[2],
            self.expected: batch[3],
            self.dropout_keep_prob: dropout_keep_prob,
        }
        self.sess.run(self.optimizer, feed_dict=feed)
        if epoch % 100 == 0:
            accuracy = self.sess.run(self.accuracy, feed_dict=feed)
            print('epoch {0}, training accuracy {1}'.format(epoch, accuracy))
class Batcher:
    """Feeds input data to the Oracle in fixed-size batches."""

    def __init__(self, batch_size=1024):
        self.batch_size = batch_size
        self.oracle = Oracle()

    def set_batch_size(self, batch_size: int):
        self.batch_size = batch_size

    # make predictions one batch at a time, then aggregate the results and return
    def batch_predict(self, input_data):
        """Consume *input_data* batch-by-batch through the oracle and
        return the concatenated predictions as a float32 tensor."""
        output = list()
        while input_data:
            batch = self.next_batch(input_data)
            output.append(self.oracle.query(batch))
        output = np.concatenate(output)
        print('{} results predicted'.format(len(output)))
        return tf.convert_to_tensor(output, dtype=tf.float32)

    def next_batch(self, input_data) -> list:
        """Remove and return up to ``batch_size`` items from the front of
        *input_data* (mutating it in place). Returns an empty list when
        no data remains.

        PERF FIX: the original popped items one at a time with
        ``input_data.pop(0)``, which is O(n) per pop and made batching the
        whole dataset quadratic. One slice + del has identical observable
        behavior (same returned batch, same mutation of the caller's list)
        in linear time.
        """
        batch = input_data[:self.batch_size]
        del input_data[:self.batch_size]
        return batch
def genExcludeCaseGroup(self,filterFunc=None):
    """Sample a pair of points whose oracle merge decision is positive
    (optionally also satisfying *filterFunc*) and return the labelled
    exclude-training examples for both inputs.

    NOTE(review): the trailing ``zip(...) + zip(...)`` concatenation
    requires Python 2 (zip returns a list); under Python 3 this would
    raise TypeError — confirm target interpreter.
    """
    x1,x2=None,None
    merge=False
    compliant=False
    # Resample until the oracle says "merge" AND the optional filter accepts.
    while not (merge and compliant):
        x1='a'+choice(self.points)
        x2='a'+choice(self.points)
        merge=Oracle.mergeDecision(x1,x2)
        compliant=filterFunc(x1,x2,merge) if filterFunc else True
    s=Schema(x1,x2)
    # First expanded representation of the pair.
    R=s.expandX()[0]
    iSB=s.scoring2exclude(R)
    # Model inputs for each side of the pair.
    exP,exQ=iSB.excludeInputModels()
    # Oracle-labelled positions to exclude for each input.
    exP_Y=Oracle.excludeOnMerge(x1)
    exQ_Y=Oracle.excludeOnMerge(x2)
    # Pair every input with a boolean label: "is this position excluded?"
    return zip(exP,[i in exP_Y for i in range(len(x1))])+zip(exQ,[i in exQ_Y for i in range(len(x2))])
def setUp(self):
    """Build a small two-model fixture sharing one growth activity.

    Both models start from the same setup conditions; they differ only in
    which reaction (r1 vs r2) they include.
    """
    # Basic entities and the single compartment.
    self.g1 = Gene('g1')
    self.p1 = Protein('p1')
    self.met1 = Metabolite('met1')
    self.met2 = Metabolite('met2')
    self.cplx1 = Complex('cplx1')
    self.cytosol = Cytosol()

    # Presence conditions in the cytosol.
    self.cond1 = PresentEntity(self.met1, self.cytosol)
    self.cond2 = PresentEntity(self.met2, self.cytosol)
    self.cond3 = PresentEntity(self.p1, self.cytosol)
    self.cond4 = PresentEntity(self.cplx1, self.cytosol)

    # Activities: growth plus two alternative reactions.
    self.growth = Growth('growth', [self.cond2])
    self.r1 = Reaction('r1', [self.cond1], [self.cond2])
    self.r2 = Reaction('r2', [self.cond3], [self.cond4])

    self.entities = [self.g1, self.p1, self.met1, self.met2, self.cplx1]
    self.compartments = [self.cytosol]
    self.activities = [self.growth, self.r1, self.r2]
    self.setup_conds = [self.cond1, self.cond3]

    self.mod1 = Model('m0', self.setup_conds, [self.growth, self.r1], [])
    self.mod2 = Model('m1', self.setup_conds, [self.growth, self.r2], [])
    self.oracle = Oracle(None, [], [], self.mod1, self.entities,
                         self.compartments, self.activities)
def __init__(self, config=None, sgdb=None, db_migrate=None,
             execution_log=None):
    """Wire up the migration runner with optional injected collaborators.

    *sgdb*, *db_migrate* and *execution_log* may be supplied (e.g. by
    tests); otherwise a driver is selected from config["db_engine"].
    """
    self.cli = CLI()
    self.config = config or {}
    self.log = LOG(self.config.get("log_dir", None))
    self.sgdb = sgdb
    if self.sgdb is None and not self.config.get("new_migration", None):
        # BUG FIX: the original used `is 'mysql'` / `is 'oracle'` — an
        # identity comparison that only worked by accident of CPython
        # string interning. Use equality.
        engine = self.config.get("db_engine")
        if engine == 'mysql':
            from mysql import MySQL
            self.sgdb = MySQL(config)
        elif engine == 'oracle':
            from oracle import Oracle
            self.sgdb = Oracle(config)
    self.db_migrate = db_migrate or SimpleDBMigrate(config)
    # Preserve original semantics: the attribute is only set when a
    # truthy execution_log is given.
    if execution_log:
        self.execution_log = execution_log
def trouver_dernier_octet(login, IV, r, Y_n_before, Y_n): """ Fonction permettant de trouver le dernier octet de X_n, en fonction des blocs Y_n-1 et Y_n. Renverra l'octet trouvé, ainsi que son état intermédiaire (dans un tuple) login : login de l'utilisateur IV : vecteur d'initialisation, donné par le serveur lors de la demande avec SEED r : l'objet Block aléatoire permettant l'attaque Y_n_before : bloc Y_n-1 Y_n : bloc Y_n """ #Création de l'oracle oracle = Oracle(BASE_URL, ORACLE_URL, login, SEED) i = 0 while True: h = "0x{:02x}".format(i) r[15] = int(h, 16) if debug: print("i : {0} ; r : {1}".format(h, r.hex() + Y_n.hex())) reponse = oracle.demande_informations(r.hex() + Y_n.hex(), IV.hex()) if reponse['status'] != 'invalid padding': print("VALIDE : " + reponse['status'] + " - " + str(i)) break i = i+1 #L'octet intermédiaire est i ^ 01 byte_IS = i ^ 1 #Le dernier octet de X_n est donc byte_IS ^ Y_n[len(Y_n)-1] last_byte = byte_IS ^ Y_n_before[15] last_byte = hex(last_byte) #Récupération des deux derniers hexa -> un octet last_byte = last_byte[2:] #Vérification - si la longueur vaut 1, dans ce cas on concatène 0 avec le dernier octet if len(last_byte) == 1: last_byte = "0"+last_byte #Destruction de l'oracle del oracle return (byte_IS, last_byte)
def login(self, received):
    """Process a login request: authenticate via the module-level login(),
    bind this transport to the staff member, and reply."""
    payload = received['value']
    oracle = Oracle()
    response = {}
    response['note'] = 'login_over'
    response['value'] = login(oracle, **payload)
    # Remember which transport belongs to this staff member.
    self.bind_staff_transport(payload['staff_id'], self.transport)
    self.write(response)
def instanceTax2(**dic):
    """Put an invoice on the shelf (发票上架) by running instanceTax on a
    worker thread; errors/results are routed through error/right2."""
    try:
        oracle = Oracle()
        # BUG FIX: deferToThread takes the callable and its arguments
        # separately. The original wrote deferToThread(instanceTax(oracle,
        # **dic)), which ran instanceTax synchronously on the reactor
        # thread and deferred its *return value* instead of the call.
        return threads.deferToThread(instanceTax, oracle, **dic)\
            .addErrback(error, oracle)\
            .addCallback(right2, oracle)
    except Exception:
        raise Exception(getExcInfo())
def main():
    """Flask view: on POST, predict a stock's future and render the
    results; on GET, render the empty page."""
    result = {}
    if request.method == 'POST':
        symbol = request.form.get('symbol')
        stock = Oracle(symbol, requests.Session())
        prediction = stock.predict_future(15)
        (accuracy, increase_accuracy, decrease_accuracy,
         pred_profit, hold_profit) = stock.evaluate_prediction()
        # Render only the day-of-month for the x axis.
        prediction["timestamp"] = prediction['timestamp'].dt.day
        result["ticker"] = symbol.upper()
        result["prediction"] = prediction
        result["increase_accuracy"] = increase_accuracy
        result["decrease_accuracy"] = decrease_accuracy
        result["in_range_accuracy"] = accuracy
        result["pred_profit"] = pred_profit
        result["hold_profit"] = hold_profit
    return render_template('main.html', future=result)
def changeRetail(self, args):
    """Apply a retail change through the DB, wrapped in a Deferred with
    the standard error/right handlers."""
    try:
        oracle = Oracle()
        d = defer.succeed(changeRetail(oracle, args))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def _create_oracle(self):
    """
    + Description: initialize oracle to fast queries.
    + Input: -
    + Output: -
    """
    # The oracle wraps self.world so repeated lookups are fast.
    self.oracle = Oracle(self.world)
def selectBbs(self, args):
    """Query the chat room (聊天室), wrapped in a Deferred with the
    standard error/right handlers."""
    try:
        oracle = Oracle()
        d = defer.succeed(selectBbs(oracle, **args))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def test_in_vitro_basic(self):
    """Reconstructing a bare one-reaction system in vitro succeeds."""
    cytosol = Cytosol()
    substrate = Metabolite('met1')
    product = Metabolite('met2')
    before = PresentEntity(substrate, cytosol)
    after = PresentEntity(product, cytosol)
    reaction = Reaction('r1', [before], [after])
    self.oracle = Oracle(None, [], [reaction], None, [], [], [])
    description = ExperimentDescription(ReconstructionActivity('r1'), [])
    result = self.oracle.execute_in_vitro_exp(description)
    self.assertEqual(result.outcome, True)
def __init__(self, T, K, C, sigma2_w, opt_iters, R0_init_scale):
    """Set up the experiment: evaluator, two memory writers, a random
    initial memory distribution, and the oracle."""
    # Scalar hyper-parameters.
    self.T = T
    self.K = K
    self.C = C
    self.sigma2_w = sigma2_w
    self.opt_iters = opt_iters
    self.R0_init_scale = R0_init_scale

    # Collaborators parameterized by (K, C).
    self.evaluator = Evaluator(K=self.K, C=self.C)
    self.DKM = MemoryWriterDKM(K=self.K, C=self.C, sigma2_w=self.sigma2_w)
    self.VBM = MemoryWriterVBM(K=self.K, C=self.C)

    # Random initial memory mean, identity covariance.
    self.R0 = np.random.normal(loc=0.0, scale=self.R0_init_scale,
                               size=(self.K, self.C))
    self.U0 = np.eye(K)
    self.pM = DistributionalMemory(R=self.R0, U=self.U0)
    self.orcl = Oracle(K=K, C=C)
def delRolePrivilege(self, args):
    """Delete a role privilege (删除角色权限), wrapped in a Deferred with
    the standard error/right handlers."""
    try:
        oracle = Oracle()
        d = defer.succeed(delRolePrivilege(oracle, **args))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def addDistri(self, args):
    """Stock-in (入库), wrapped in a Deferred with the standard
    error/right handlers."""
    try:
        oracle = Oracle()
        d = defer.succeed(addDistri(oracle, **args))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def getTax(self, staff_id):
    """Fetch an invoice (取发票) for *staff_id*, wrapped in a Deferred
    with the standard error/right handlers."""
    try:
        oracle = Oracle()
        d = defer.succeed(getTax(oracle, staff_id))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def getOrgID(self):
    """Fetch an organization id (获取id), wrapped in a Deferred with the
    standard error/right handlers."""
    try:
        oracle = Oracle()
        d = defer.succeed(getOrgID(oracle))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def delOrg(self, args):
    """Delete an organization (删除org), wrapped in a Deferred with the
    standard error/right handlers."""
    try:
        oracle = Oracle()
        d = defer.succeed(delOrg(oracle, args))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def hello(self, who):
    """Connectivity test endpoint; the Oracle is created only so the
    shared error/right handlers have a handle to release."""
    try:
        oracle = Oracle()
        d = defer.succeed(hello(who))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def checkIfExist(self, args):
    """Check whether number segments overlap (检查号段是否重叠), wrapped
    in a Deferred with the standard error/right handlers."""
    try:
        oracle = Oracle()
        d = defer.succeed(checkIfExist(oracle, **args))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def modifyRole(self, args):
    """Modify a role (修改角色), wrapped in a Deferred with the standard
    error/right handlers."""
    try:
        oracle = Oracle()
        d = defer.succeed(modifyRole(oracle, **args))
        d.addErrback(error, oracle)
        d.addCallback(right, oracle)
        return d
    except Warning_:
        raise Warning_(getWarningInfo())
    except Exception:
        raise Exception(getExcInfo())
def __init__(self, config, sgdb=None):
    """Validate configuration and select the database driver.

    Raises when *config* is not a Config instance or the engine is
    unsupported; *sgdb* may inject a pre-built driver (e.g. in tests).
    """
    if not isinstance(config, Config):
        raise Exception("config must be an instance of simple_db_migrate.config.Config")
    self.cli = CLI()
    self.config = config
    self.log = LOG(self.config.get("log_dir", None))
    self.sgdb = sgdb
    if self.sgdb is None and not self.config.get("new_migration", None):
        # BUG FIX: the original compared with `is 'mysql'` / `is 'oracle'`,
        # an identity test that only matched by accident of CPython string
        # interning. Use equality.
        engine = self.config.get("db_engine")
        if engine == 'mysql':
            from mysql import MySQL
            self.sgdb = MySQL(config)
        elif engine == 'oracle':
            from oracle import Oracle
            self.sgdb = Oracle(config)
        else:
            raise Exception("engine not supported '%s'" % engine)
    self.db_migrate = SimpleDBMigrate(self.config)
def __init__(self, n_streams, master_lock, mode, keychain, loki):
    """Set up shared state for the worker streams and acquire the oracle,
    market and other resources."""
    # Stream bookkeeping and shared synchronization.
    self.nP = n_streams
    self.mode = mode
    self.totalScore = 0
    self.lock = master_lock

    # External services.
    self.oracle = Oracle(keychain, loki)
    # Database file names come from module-level configuration.
    self.json_db_filename = json_db
    self.SQL_db_filename = sql_db

    self.nbatches = 0
    self.market = Market()
    self.initResources()
def test_updatePool(): from oracle import Oracle oracle = Oracle() #select_colums = ['pool_id', 'tax_code', 'tax_nbr'] #where_dic = {"tax_code":"1111",} #print selectPool(oracle, select_colums, where_dic) where_dic = {'tax_code': '1111', 'tax_nbr': '0000'} update_dic = {'tax_code': '1111', 'tax_nbr': '0000'} print updatePool(oracle, update_dic, where_dic)
def test_in_vitro_enz(self):
    """An enzyme-dependent reaction is reconstructed in vitro when the
    catalysing protein is supplied."""
    cytosol = Cytosol()
    substrate = Metabolite('met1')
    product = Metabolite('met2')
    before = PresentEntity(substrate, cytosol)
    after = PresentEntity(product, cytosol)
    needs_enzyme = PresentCatalyst(cytosol)
    reaction = Reaction('r1', [before, needs_enzyme], [after])
    enzyme = Protein('p1', properties=[Catalyses(reaction)])
    self.oracle = Oracle(None, [enzyme], [reaction], None, [], [], [])
    description = ExperimentDescription(
        ReconstructionEnzReaction('r1', 'p1'), [])
    result = self.oracle.execute_in_vitro_exp(description)
    self.assertEqual(result.outcome, True)
def test_in_vitro_transp(self):
    """A transporter-dependent reaction is reconstructed in vitro when the
    transporting protein is supplied."""
    cytosol = Cytosol()
    substrate = Metabolite('met1')
    product = Metabolite('met2')
    before = PresentEntity(substrate, cytosol)
    after = PresentEntity(product, cytosol)
    needs_transporter = PresentTransporter(cytosol)
    reaction = Reaction('r1', [before, needs_transporter], [after])
    transporter = Protein('p1', properties=[Transports(reaction)])
    self.oracle = Oracle(None, [transporter], [reaction], None, [], [], [])
    description = ExperimentDescription(
        ReconstructionTransporterRequired('r1', 'p1'), [])
    result = self.oracle.execute_in_vitro_exp(description)
    self.assertEqual(result.outcome, True)
def genMergeCase(self,ExcludeClassifier,filterFunc=None):
    """Sample a point pair (optionally constrained by *filterFunc*) and
    return its merge-model inputs X together with the oracle label Y."""
    while True:
        x1 = 'a' + choice(self.points)
        x2 = 'a' + choice(self.points)
        Y = Oracle.mergeDecision(x1, x2)
        # Accept when no filter is given or the filter approves.
        if not filterFunc or filterFunc(x1, x2, Y):
            break
    schema = Schema(x1, x2)
    R = schema.expandX()[0]
    scored = schema.scoring2merge(R, ExcludeClassifier)
    X = scored.mergeInputModel()
    # X = IN, OUT, SUM, DIFF
    return X, Y
def genMergeCase_old(self,filterFunc=None,blame=0.2):
    """Deprecated variant of genMergeCase using the old manual exclude
    classifier (scoring2merge_old with a fixed blame ratio)."""
    while True:
        x1 = 'a' + choice(self.points)
        x2 = 'a' + choice(self.points)
        Y = Oracle.mergeDecision(x1, x2)
        # Accept when no filter is given or the filter approves.
        if not filterFunc or filterFunc(x1, x2, Y):
            break
    schema = Schema(x1, x2)
    R = schema.expandX()[0]
    scored = schema.scoring2merge_old(R, blameRatio=blame)
    X = scored.mergeInputModel()
    # X = IN, OUT, SUM, DIFF
    return X, Y
def main():
    """MAC a message through the oracle and verify the resulting tag.

    Uses a fixed 32-byte message when no file argument is given, otherwise
    reads the message from the file named in argv[1].
    """
    if len(sys.argv) < 2:
        data = bytearray('I, the server, hereby agree that I will pay $100 to this student.'.encode('ascii'))
        data = data[:32]
    else:
        # BUG FIX: use a context manager so the file is closed even if
        # read() raises.
        with open(sys.argv[1]) as data_file:
            data = data_file.read()
    # BUG FIX: create/connect the oracle *before* the try block. In the
    # original, if Oracle() raised, the finally clause referenced the
    # unbound local `oracle` and masked the real error with a NameError.
    oracle = Oracle()
    oracle.connect()
    try:
        tag = oracle.mac(data, len(data))
        ret = oracle.vrfy(data, len(data), tag)
        print(ret)
        if ret == 1:
            print("Message verified successfully!")
        else:
            print("Message verification failed.")
    finally:
        oracle.disconnect()
def generate_training_data(self, tagged_sentence, predicted_tags, parse_examples, crel_examples):
    """Run the oracle-guided shift-reduce parse of one sentence, adding
    parse-action examples to *parse_examples* and causal-direction
    examples to *crel_examples*, and return the predicted relations.

    NOTE(review): returns [] on the early exits but a set() otherwise —
    callers must tolerate both; confirm whether this is intentional.
    """
    action_history = []
    action_tag_pair_history = []
    pos_ptag_seq, pos_ground_truth, tag2span, all_predicted_rtags, all_actual_crels = self.get_tags_relations_for(tagged_sentence, predicted_tags, self.cr_tags)
    # Nothing to parse if no relation tags were predicted.
    if len(all_predicted_rtags) == 0:
        return []
    words = [wd for wd, tags in tagged_sentence]
    # Initialize stack, basic parser and oracle
    stack = Stack(verbose=False)
    # needs to be a tuple
    stack.push((ROOT,0))
    parser = Parser(stack)
    oracle = Oracle(pos_ground_truth, parser)
    predicted_relations = set()
    # tags without positional info
    tag_seq = [t for t,i in pos_ptag_seq]
    rtag_seq = [t for t in tag_seq if t[0].isdigit()]
    # if not at least 2 concept codes, then can't parse
    if len(rtag_seq) < 2:
        return []
    # Oracle parsing logic: consume the buffer one positional tag at a time.
    for tag_ix, buffer in enumerate(pos_ptag_seq):
        buffer_tag = buffer[0]
        bstart, bstop = tag2span[buffer]
        buffer_word_seq = words[bstart:bstop + 1]
        # Features from the words covered by the buffer item.
        buffer_feats = self.feat_extractor.extract(buffer_tag, buffer_word_seq, self.positive_val)
        buffer_feats = self.__prefix_feats_("BUFFER", buffer_feats)
        # Reduce against the stack until the oracle stops.
        while True:
            tos = oracle.tos()
            tos_tag = tos[0]
            if tos_tag == ROOT:
                # ROOT covers no words; use an empty feature set.
                tos_feats = {}
                tstart, tstop = -1,-1
            else:
                tstart, tstop = tag2span[tos]
                tos_word_seq = words[tstart:tstop + 1]
                tos_feats = self.feat_extractor.extract(tos_tag, tos_word_seq, self.positive_val)
                tos_feats = self.__prefix_feats_("TOS", tos_feats)
            # Features from the words between TOS and the buffer item.
            btwn_start, btwn_stop = min(tstop+1, len(words)-1), max(0, bstart-1)
            btwn_words = words[btwn_start:btwn_stop + 1]
            btwn_feats = self.feat_extractor.extract("BETWEEN", btwn_words, self.positive_val)
            btwn_feats = self.__prefix_feats_("__BTWN__", btwn_feats)
            # History- and context-conditioned features, then interactions.
            feats = self.get_conditional_feats(action_history, action_tag_pair_history, tos_tag, buffer_tag, tag_seq[:tag_ix], tag_seq[tag_ix + 1:])
            interaction_feats = self.get_interaction_feats(tos_feats, buffer_feats)
            feats.update(buffer_feats)
            feats.update(tos_feats)
            feats.update(btwn_feats)
            feats.update(interaction_feats)
            gold_action = oracle.consult(tos, buffer)
            # Consult Oracle or Model based on coin toss
            rand_float = np.random.random_sample()
            # between [0,1) (half-open interval, includes 0 but not 1)
            # If no trained models, always use Oracle
            if rand_float >= self.beta and len(self.parser_models) > 0:
                action = self.predict_parse_action(feats, tos_tag)
            else:
                action = gold_action
            action_history.append(action)
            action_tag_pair_history.append((action, tos_tag, buffer_tag))
            # Per-action cost from the remaining buffer and current state.
            cost_per_action = self.compute_cost(pos_ground_truth, pos_ptag_seq[tag_ix:], oracle)
            # make a copy as changing later
            parse_examples.add(dict(feats), gold_action, cost_per_action)
            # Decide the direction of the causal relation
            if action in [LARC, RARC]:
                cause_effect = denormalize_cr((tos_tag, buffer_tag))
                effect_cause = denormalize_cr((buffer_tag, tos_tag))
                # Gold direction label from the actual relations.
                if cause_effect in all_actual_crels and effect_cause in all_actual_crels:
                    gold_lr_action = CAUSE_AND_EFFECT
                elif cause_effect in all_actual_crels:
                    gold_lr_action = CAUSE_EFFECT
                elif effect_cause in all_actual_crels:
                    gold_lr_action = EFFECT_CAUSE
                else:
                    gold_lr_action = REJECT
                # Add additional features
                # needs to be before predict below
                feats.update(self.crel_features(action, tos_tag, buffer_tag))
                rand_float = np.random.random_sample()
                if rand_float >= self.beta and len(self.crel_models) > 0:
                    lr_action = self.predict_crel_action(feats)
                else:
                    lr_action = gold_lr_action
                if lr_action == CAUSE_AND_EFFECT:
                    predicted_relations.add(cause_effect)
                    predicted_relations.add(effect_cause)
                elif lr_action == CAUSE_EFFECT:
                    predicted_relations.add(cause_effect)
                elif lr_action == EFFECT_CAUSE:
                    predicted_relations.add(effect_cause)
                elif lr_action == REJECT:
                    pass
                else:
                    raise Exception("Invalid CREL type")
                # cost is always 1 for this action (cost of 1 for getting it wrong)
                # because getting the wrong direction won't screw up the parse as it doesn't modify the stack
                crel_examples.add(dict(feats), gold_lr_action)
                # Not sure we want to condition on the actions of this crel model
                # action_history.append(lr_action)
                # action_tag_pair_history.append((lr_action, tos, buffer))
            # end if action in [LARC,RARC]
            # Apply the action; a False return means move to the next
            # buffer item.
            if not oracle.execute(action, tos, buffer):
                break
            if oracle.is_stack_empty():
                break
    # Validation logic. Break on pass as relations that should be parsed
    for pcr in all_actual_crels:
        l,r = normalize_cr(pcr)
        if l in rtag_seq and r in rtag_seq and pcr not in predicted_relations:
            pass
    return predicted_relations
class Main(object):
    """Entry point of the simple-db-migrate tool: selects a DB driver and
    either creates a new migration file or runs migrations up/down."""

    def __init__(self, config, sgdb=None):
        """Validate *config* and select a database driver (or accept an
        injected one via *sgdb*)."""
        if not isinstance(config, Config):
            raise Exception("config must be an instance of simple_db_migrate.config.Config")
        self.cli = CLI()
        self.config = config
        self.log = LOG(self.config.get("log_dir", None))
        self.sgdb = sgdb
        if self.sgdb is None and not self.config.get("new_migration", None):
            # BUG FIX: the original used `is 'mysql'` / `is 'oracle'` — an
            # identity comparison that only matched via CPython string
            # interning. Use equality.
            engine = self.config.get("db_engine")
            if engine == 'mysql':
                from mysql import MySQL
                self.sgdb = MySQL(config)
            elif engine == 'oracle':
                from oracle import Oracle
                self.sgdb = Oracle(config)
            else:
                raise Exception("engine not supported '%s'" % engine)
        self.db_migrate = SimpleDBMigrate(self.config)

    def execute(self):
        """Create a new migration file or run pending migrations."""
        self._execution_log("\nStarting DB migration...", "PINK", log_level_limit=1)
        if self.config.get("new_migration", None):
            self._create_migration()
        else:
            self._migrate()
        self._execution_log("\nDone.\n", "PINK", log_level_limit=1)

    def _create_migration(self):
        """Create an empty migration file in the first migrations dir."""
        migrations_dir = self.config.get("migrations_dir")
        new_file = Migration.create(self.config.get("new_migration", None),
                                    migrations_dir[0],
                                    self.config.get("db_script_encoding", "utf-8"),
                                    self.config.get("utc_timestamp", False))
        self._execution_log("- Created file '%s'" % (new_file), log_level_limit=1)

    def _migrate(self):
        """Compute destination/current versions and execute migrations."""
        destination_version = self._get_destination_version()
        current_version = self.sgdb.get_current_schema_version()
        # do it!
        self._execute_migrations(current_version, destination_version)

    def _get_destination_version(self):
        """Resolve the target schema version from schema_version and/or
        label_version, raising on inconsistent combinations."""
        label_version = self.config.get("label_version", None)
        schema_version = self.config.get("schema_version", None)
        destination_version = None
        destination_version_by_label = None
        destination_version_by_schema = None
        if label_version is not None:
            destination_version_by_label = self.sgdb.get_version_number_from_label(label_version)
            # If the specified label exists in the database and no schema
            # version was given, behave as if schema_version had been set
            # to the version carrying that label.
            if destination_version_by_label is not None and schema_version is None:
                schema_version = destination_version_by_label
                self.config.update("schema_version", destination_version_by_label)
        if schema_version is not None and self.sgdb.get_version_id_from_version_number(schema_version):
            destination_version_by_schema = schema_version
        if label_version is None:
            if schema_version is None:
                destination_version = self.db_migrate.latest_version_available()
            elif destination_version_by_schema is None:
                destination_version = schema_version
            else:
                destination_version = destination_version_by_schema
        else:
            if schema_version is None:
                destination_version = self.db_migrate.latest_version_available()
            elif (destination_version_by_label is None) or (destination_version_by_schema == destination_version_by_label):
                destination_version = schema_version
        if (destination_version_by_schema is not None) and (destination_version_by_label is not None) and (destination_version_by_schema != destination_version_by_label):
            raise Exception("label (%s) and schema_version (%s) don't correspond to the same version at database" % (label_version, schema_version))
        if (schema_version is not None and label_version is not None) and ((destination_version_by_schema is not None and destination_version_by_label is None) or (destination_version_by_schema is None and destination_version_by_label is not None)):
            raise Exception("label (%s) or schema_version (%s), only one of them exists in the database" % (label_version, schema_version))
        # BUG FIX: the original tested `destination_version is not '0'`,
        # comparing identity with a string literal; use inequality.
        if destination_version != '0' and not (self.db_migrate.check_if_version_exists(destination_version) or self.sgdb.get_version_id_from_version_number(destination_version)):
            raise Exception("version not found (%s)" % destination_version)
        return destination_version

    def _get_migration_files_to_be_executed(self, current_version, destination_version, is_migration_up):
        """Return the ordered list of Migration objects to apply for the
        requested up/down move (possibly empty)."""
        if current_version == destination_version and not self.config.get("force_execute_old_migrations_versions", False):
            return []
        schema_versions = self.sgdb.get_all_schema_versions()
        migration_versions = self.db_migrate.get_all_migration_versions()
        # migration up
        if is_migration_up:
            remaining_versions_to_execute = Lists.subtract(migration_versions, schema_versions)
            remaining_migrations_to_execute = [self.db_migrate.get_migration_from_version_number(version) for version in remaining_versions_to_execute if version <= destination_version]
            return remaining_migrations_to_execute
        # migration down...
        destination_version_id = self.sgdb.get_version_id_from_version_number(destination_version)
        migrations = self.sgdb.get_all_schema_migrations()
        down_migrations_to_execute = [migration for migration in migrations if migration.id > destination_version_id]
        force_files = self.config.get("force_use_files_on_down", False)
        for migration in down_migrations_to_execute:
            # When the stored down-SQL is missing (or files are forced),
            # reload the SQL from the migration file on disk.
            if not migration.sql_down or force_files:
                if migration.version not in migration_versions:
                    raise Exception("impossible to migrate down: one of the versions was not found (%s)" % migration.version)
                migration_tmp = self.db_migrate.get_migration_from_version_number(migration.version)
                migration.sql_up = migration_tmp.sql_up
                migration.sql_down = migration_tmp.sql_down
                migration.file_name = migration_tmp.file_name
        # Down migrations run newest-first.
        down_migrations_to_execute.reverse()
        return down_migrations_to_execute

    def _execute_migrations(self, current_version, destination_version):
        """
        passed a version:
            this version don't exists in the database and is younger than the last version -> do migrations up until this version
            this version don't exists in the database and is older than the last version -> do nothing, is a unpredictable behavior
            this version exists in the database and is older than the last version -> do migrations down until this version

        didn't pass a version -> do migrations up until the last available version
        """
        is_migration_up = True
        # check if a version was passed to the program
        if self.config.get("schema_version"):
            # if was passed and this version is present in the database, check if is older than the current version
            destination_version_id = self.sgdb.get_version_id_from_version_number(destination_version)
            if destination_version_id:
                current_version_id = self.sgdb.get_version_id_from_version_number(current_version)
                # if this version is previous to the current version in database, then will be done a migration down to this version
                if current_version_id > destination_version_id:
                    is_migration_up = False
            # if was passed and this version is not present in the database and is older than the current version, raise an exception
            # cause is trying to go down to something that never was done
            elif current_version > destination_version:
                raise Exception("Trying to migrate to a lower version wich is not found on database (%s)" % destination_version)
        # getting only the migration sql files to be executed
        migrations_to_be_executed = self._get_migration_files_to_be_executed(current_version, destination_version, is_migration_up)
        self._execution_log("- Current version is: %s" % current_version, "GREEN", log_level_limit=1)
        if migrations_to_be_executed is None or len(migrations_to_be_executed) == 0:
            self._execution_log("- Destination version is: %s" % current_version, "GREEN", log_level_limit=1)
            self._execution_log("\nNothing to do.\n", "PINK", log_level_limit=1)
            return
        self._execution_log("- Destination version is: %s" % (is_migration_up and migrations_to_be_executed[-1].version or destination_version), "GREEN", log_level_limit=1)
        up_down_label = is_migration_up and "up" or "down"
        if self.config.get("show_sql_only", False):
            self._execution_log("\nWARNING: database migrations are not being executed ('--showsqlonly' activated)", "YELLOW", log_level_limit=1)
        else:
            self._execution_log("\nStarting migration %s!" % up_down_label, log_level_limit=1)
        self._execution_log("*** versions: %s\n" % ([migration.version for migration in migrations_to_be_executed]), "CYAN", log_level_limit=1)
        sql_statements_executed = []
        for migration in migrations_to_be_executed:
            sql = is_migration_up and migration.sql_up or migration.sql_down
            if not self.config.get("show_sql_only", False):
                self._execution_log("===== executing %s (%s) =====" % (migration.file_name, up_down_label), log_level_limit=1)
                label = None
                if is_migration_up:
                    label = self.config.get("label_version", None)
                try:
                    self.sgdb.change(sql, migration.version, migration.file_name, migration.sql_up, migration.sql_down, is_migration_up, self._execution_log, label)
                # FIX: modern `except ... as ...` syntax (works on Python 2.6+
                # and Python 3) instead of the removed comma form.
                except Exception as e:
                    self._execution_log("===== ERROR executing %s (%s) =====" % (migration.abspath, up_down_label), log_level_limit=1)
                    raise e
                # paused mode
                if self.config.get("paused_mode", False):
                    raw_input("* press <enter> to continue... ")
            # recording the last statement executed
            sql_statements_executed.append(sql)
        if self.config.get("show_sql", False) or self.config.get("show_sql_only", False):
            self._execution_log("__________ SQL statements executed __________", "YELLOW", log_level_limit=1)
            for sql in sql_statements_executed:
                self._execution_log(sql, "YELLOW", log_level_limit=1)
            self._execution_log("_____________________________________________", "YELLOW", log_level_limit=1)
def main():
    """Entry point: initialise logging, then build and run the Oracle."""
    logger.init_logger()
    oracle = Oracle()
    oracle.run()
def generate_training_data(self, tagged_sentence, predicted_tags, out_parse_examples, out_crel_examples,
                           predict_only=False):
    """Shift-reduce parse one sentence, emitting training examples (or predictions).

    Walks the predicted tag sequence as a buffer against the oracle's stack.
    At each step it extracts features, picks a parse action (gold from the
    oracle when training, or a model prediction based on a coin toss against
    self.beta), records a cost-weighted parse example, and — for arc actions —
    decides the causal direction and records a crel example.

    Parameters
    ----------
    tagged_sentence : sequence of (word, tags) pairs.
    predicted_tags : predicted tags for the sentence (passed through to
        get_tags_relations_for).
    out_parse_examples, out_crel_examples : example collectors; must expose
        .add(feats_dict, label, ...) — examples are appended as a side effect
        (skipped entirely when predict_only=True for crel examples).
    predict_only : bool
        When True, gold labels are cleared and only model predictions are used.

    Returns
    -------
    set of str : the predicted causal relation strings.
    """
    pos_ptag_seq, pos_ground_truth_crels, tag2span, all_predicted_rtags, all_actual_crels = self.get_tags_relations_for(
        tagged_sentence, predicted_tags, self.cr_tags)

    if predict_only:
        # clear labels - no gold information may leak into prediction mode
        pos_ground_truth_crels = []
        all_actual_crels = set()

    # Nothing to parse if no relation tags were predicted
    if len(all_predicted_rtags) == 0:
        return set()

    words = [wd for wd, tags in tagged_sentence]

    # Initialize stack, basic parser and oracle
    # needs to be a tuple
    parser = ShiftReduceParser(Stack(verbose=False))
    parser.stack.push((ROOT, 0))
    oracle = Oracle(pos_ground_truth_crels, parser)

    predicted_relations = set()  # type: Set[str]

    # instead of head and modifiers, we will map causers to effects, and vice versa
    effect2causers = defaultdict(set)
    # heads can have multiple modifiers
    cause2effects = defaultdict(set)

    # tags without positional info (concept-code tags start with a digit)
    rtag_seq = [t for t, i in pos_ptag_seq if t[0].isdigit()]
    # if not at least 2 concept codes, then can't parse
    if len(rtag_seq) < 2:
        return set()

    # Pre-compute the ngram features for the word span of every tag
    tag2words = defaultdict(list)
    for ix, tag_pair in enumerate(pos_ptag_seq):
        bstart, bstop = tag2span[tag_pair]
        word_seq = words[bstart:bstop + 1]
        tag2words[tag_pair] = self.ngram_extractor.extract(word_seq)  # type: List[str]

    # Store all words for use in feature extracion
    tag2words[("ALL", -1)] = words

    # Oracle parsing logic
    # consume the buffer
    for tag_ix, buffer_tag_pair in enumerate(pos_ptag_seq):
        buffer_tag = buffer_tag_pair[0]
        bstart, bstop = tag2span[buffer_tag_pair]

        remaining_buffer_tags = pos_ptag_seq[tag_ix:]
        # Consume the stack
        while True:
            tos_tag_pair = oracle.tos()
            tos_tag = tos_tag_pair[0]

            # Returns -1,-1 if TOS is ROOT
            if tos_tag == ROOT:
                tstart, tstop = -1, -1
            else:
                tstart, tstop = tag2span[tos_tag_pair]

            # Note that the end ix in tag2span is always the last index, not the last + 1
            btwn_start, btwn_stop = min(tstop + 1, len(words)), max(0, bstart)

            btwn_word_seq = words[btwn_start:btwn_stop]
            distance = len(btwn_word_seq)
            btwn_word_ngrams = self.ngram_extractor.extract(btwn_word_seq)  # type: List[str]

            feats = self.feat_extractor.extract(stack_tags=oracle.parser.stack.contents(),
                                                buffer_tags=remaining_buffer_tags,
                                                tag2word_seq=tag2words,
                                                between_word_seq=btwn_word_ngrams,
                                                distance=distance,
                                                cause2effects=cause2effects,
                                                effect2causers=effect2causers,
                                                positive_val=self.positive_val)

            # Consult Oracle or Model based on coin toss
            if predict_only:
                action = self.predict_parse_action(feats=feats,
                                                   tos=tos_tag,
                                                   models=self.parser_models[-1],
                                                   vectorizer=self.parser_feature_vectorizers[-1])
            else:  # if training
                gold_action = oracle.consult(tos_tag_pair, buffer_tag_pair)
                rand_float = np.random.random_sample()  # between [0,1) (half-open interval, includes 0 but not 1)

                # If no trained models, always use Oracle
                if len(self.parser_models) == 0:
                    action = gold_action
                elif rand_float <= self.beta:
                    # With probability beta, use the most recent model
                    action = self.predict_parse_action(feats=feats,
                                                       tos=tos_tag,
                                                       models=self.parser_models[-1],
                                                       vectorizer=self.parser_feature_vectorizers[-1])
                else:
                    if len(self.parser_models) < 2:
                        action = gold_action
                    # use previous model if available
                    else:
                        action = self.predict_parse_action(feats=feats,
                                                           tos=tos_tag,
                                                           models=self.parser_models[-2],
                                                           vectorizer=self.parser_feature_vectorizers[-2])

                # Given the remaining tags, what is the cost of this decision
                # in terms of the optimal decision(s) that can be made?
                cost_per_action = self.cost_function(pos_ground_truth_crels, remaining_buffer_tags, oracle)
                # make a copy as changing later
                out_parse_examples.add(dict(feats), gold_action, cost_per_action)

            # Decide the direction of the causal relation
            if action in [LARC, RARC]:
                c_e_pair = (tos_tag, buffer_tag)
                # Convert to a string Causer:{l}->Result:{r}
                cause_effect = denormalize_cr(c_e_pair)

                e_c_pair = (buffer_tag, tos_tag)
                # Convert to a string Causer:{l}->Result:{r}
                effect_cause = denormalize_cr(e_c_pair)

                if predict_only:
                    gold_lr_action = None
                else:
                    # Gold direction label derived from which directed relations
                    # actually occur in the annotations
                    if cause_effect in all_actual_crels and effect_cause in all_actual_crels:
                        gold_lr_action = CAUSE_AND_EFFECT
                    elif cause_effect in all_actual_crels:
                        gold_lr_action = CAUSE_EFFECT
                    elif effect_cause in all_actual_crels:
                        gold_lr_action = EFFECT_CAUSE
                    else:
                        gold_lr_action = REJECT

                # Add additional features
                # needs to be before predict below
                crel_feats = self.crel_features(action, tos_tag, buffer_tag)
                feats.update(crel_feats)
                rand_float = np.random.random_sample()
                if predict_only:
                    lr_action = self.predict_crel_action(feats=feats,
                                                         model=self.crel_models[-1],
                                                         vectorizer=self.crel_feat_vectorizers[-1])
                else:
                    # Same beta coin-toss scheme as for the parse action above
                    if len(self.crel_models) == 0:
                        lr_action = gold_lr_action
                    elif rand_float <= self.beta:
                        lr_action = self.predict_crel_action(feats=feats,
                                                             model=self.crel_models[-1],
                                                             vectorizer=self.crel_feat_vectorizers[-1])
                    else:
                        if len(self.crel_models) < 2:
                            lr_action = gold_lr_action
                        else:
                            lr_action = self.predict_crel_action(feats=feats,
                                                                 model=self.crel_models[-2],
                                                                 vectorizer=self.crel_feat_vectorizers[-2])

                # Record the chosen direction(s) and update the cause/effect maps
                # consumed by later feature extraction
                if lr_action == CAUSE_AND_EFFECT:
                    predicted_relations.add(cause_effect)
                    predicted_relations.add(effect_cause)

                    cause2effects[tos_tag_pair].add(buffer_tag_pair)
                    effect2causers[buffer_tag_pair].add(tos_tag_pair)

                    cause2effects[buffer_tag_pair].add(tos_tag_pair)
                    effect2causers[tos_tag_pair].add(buffer_tag_pair)

                elif lr_action == CAUSE_EFFECT:
                    predicted_relations.add(cause_effect)

                    cause2effects[tos_tag_pair].add(buffer_tag_pair)
                    effect2causers[buffer_tag_pair].add(tos_tag_pair)

                elif lr_action == EFFECT_CAUSE:
                    predicted_relations.add(effect_cause)

                    cause2effects[buffer_tag_pair].add(tos_tag_pair)
                    effect2causers[tos_tag_pair].add(buffer_tag_pair)

                elif lr_action == REJECT:
                    pass
                else:
                    raise Exception("Invalid CREL type")

                # cost is always 1 for this action (cost of 1 for getting it wrong)
                # because getting the wrong direction won't screw up the parse as it doesn't modify the stack
                if not predict_only:
                    out_crel_examples.add(dict(feats), gold_lr_action)
                # Not sure we want to condition on the actions of this crel model
                # action_history.append(lr_action)
                # action_tag_pair_history.append((lr_action, tos, buffer))
            # end if action in [LARC,RARC]

            # oracle.execute applies the action; False means stop consuming the stack
            if not oracle.execute(action, tos_tag_pair, buffer_tag_pair):
                break
            if oracle.is_stack_empty():
                break

    # Validation logic. Break on pass as relations that should be parsed
    # for pcr in all_actual_crels:
    #     l,r = normalize_cr(pcr)
    #     if l in rtag_seq and r in rtag_seq and pcr not in predicted_relations:
    #         pass

    return predicted_relations
def predict_sentence(self, tagged_sentence, predicted_tags):
    """Shift-reduce parse one sentence using trained models only (no oracle labels).

    Same parsing loop shape as training: the predicted tag sequence is consumed
    as a buffer against the parser stack; for every stack/buffer pair, features
    are built from the buffer span, the TOS span, the words between them, and
    action-history conditioning, then the parse-action model and (for arcs) the
    causal-direction model are consulted.

    Parameters
    ----------
    tagged_sentence : sequence of (word, tags) pairs.
    predicted_tags : predicted tags for the sentence.

    Returns
    -------
    [] when nothing is parseable; otherwise the set of predicted causal
    relation strings. (NOTE(review): the empty-result type is a list while the
    normal result is a set — callers appear to tolerate both; confirm.)
    """
    action_history = []
    action_tag_pair_history = []

    pos_ptag_seq, _, tag2span, all_predicted_rtags, _ = self.get_tags_relations_for(tagged_sentence,
                                                                                    predicted_tags, self.cr_tags)
    # Nothing to parse if no relation tags were predicted
    if len(all_predicted_rtags) == 0:
        return []

    words = [wd for wd, tags in tagged_sentence]

    # Initialize stack, basic parser and oracle
    stack = Stack(verbose=False)
    # needs to be a tuple
    stack.push((ROOT, 0))
    parser = Parser(stack)
    # Empty gold relations: the oracle is used only to drive stack mechanics here
    oracle = Oracle([], parser)

    predicted_relations = set()

    # tags without positional info
    tag_seq = [t for t, i in pos_ptag_seq]
    rtag_seq = [t for t in tag_seq if t[0].isdigit()]
    # if not at least 2 concept codes, then can't parse
    if len(rtag_seq) < 2:
        return []

    # Oracle parsing logic
    for tag_ix, buffer in enumerate(pos_ptag_seq):
        buffer_tag = buffer[0]
        bstart, bstop = tag2span[buffer]
        buffer_word_seq = words[bstart:bstop + 1]
        buffer_feats = self.feat_extractor.extract(buffer_tag, buffer_word_seq, self.positive_val)
        buffer_feats = self.__prefix_feats_("BUFFER", buffer_feats)
        while True:
            tos = oracle.tos()
            tos_tag = tos[0]
            if tos_tag == ROOT:
                # ROOT has no word span; sentinel offsets keep the between-span math valid
                tos_feats = {}
                tstart, tstop = -1, -1
            else:
                tstart, tstop = tag2span[tos]
                tos_word_seq = words[tstart:tstop + 1]
                tos_feats = self.feat_extractor.extract(tos_tag, tos_word_seq, self.positive_val)
                tos_feats = self.__prefix_feats_("TOS", tos_feats)

            # Words strictly between the TOS span and the buffer span (clamped to the sentence)
            btwn_start, btwn_stop = min(tstop + 1, len(words) - 1), max(0, bstart - 1)
            btwn_words = words[btwn_start:btwn_stop + 1]
            btwn_feats = self.feat_extractor.extract("BETWEEN", btwn_words, self.positive_val)
            btwn_feats = self.__prefix_feats_("__BTWN__", btwn_feats)

            feats = self.get_conditional_feats(action_history, action_tag_pair_history, tos_tag, buffer_tag,
                                               tag_seq[:tag_ix], tag_seq[tag_ix + 1:])
            interaction_feats = self.get_interaction_feats(tos_feats, buffer_feats)

            # Merge all feature groups into one dict
            feats.update(buffer_feats)
            feats.update(tos_feats)
            feats.update(btwn_feats)
            feats.update(interaction_feats)

            # Consult Oracle or Model based on coin toss
            action = self.predict_parse_action(feats, tos_tag)
            action_history.append(action)
            action_tag_pair_history.append((action, tos_tag, buffer_tag))

            # Decide the direction of the causal relation
            if action in [LARC, RARC]:
                cause_effect = denormalize_cr((tos_tag, buffer_tag))
                effect_cause = denormalize_cr((buffer_tag, tos_tag))

                # Add additional features
                # needs to be before predict below
                feats.update(self.crel_features(action, tos_tag, buffer_tag))
                lr_action = self.predict_crel_action(feats)

                if lr_action == CAUSE_AND_EFFECT:
                    predicted_relations.add(cause_effect)
                    predicted_relations.add(effect_cause)
                elif lr_action == CAUSE_EFFECT:
                    predicted_relations.add(cause_effect)
                elif lr_action == EFFECT_CAUSE:
                    predicted_relations.add(effect_cause)
                elif lr_action == REJECT:
                    pass
                else:
                    raise Exception("Invalid CREL type")
            # end if action in [LARC,RARC]

            # oracle.execute applies the action; False means stop consuming the stack
            if not oracle.execute(action, tos, buffer):
                break
            if oracle.is_stack_empty():
                break

    # Validation logic. Break on pass as relations that should be parsed
    return predicted_relations
def trouver_bloc(login, bytes_IS_block, IV, r, Y_n_before, Y_n, last_byte, nb_block):
    """Recover the last block X_n of the message via a padding-oracle attack.

    Returns the block as a hex string containing all recovered bytes.

    login : user login
    bytes_IS_block : Block object holding every intermediate-state byte
        computed so far (used to build valid padding for earlier positions)
    IV : initialisation vector, supplied by the server along with SEED
    Y_n_before : block Y_{n-1} (previous ciphertext block)
    Y_n : block Y_n (ciphertext block under attack)
    last_byte : last byte of the block, already recovered
    nb_block : block index (only used by the commented-out upload call)

    r : random Block object that drives the attack; NOTE: mutated in place.
    """
    # Create the oracle (queries the remote padding server)
    oracle = Oracle(BASE_URL, ORACLE_URL, login, SEED)
    # Accumulated plaintext bytes (hex string) - initialised to last_byte
    bloc = last_byte
    # i is the candidate byte value currently being tried
    i = 0
    # j is the position being attacked; starts at 14 since position 15
    # (last_byte) is already known, and is decremented towards 0
    j = 14
    # Loop until every byte of the block has been recovered
    while True:
        # Candidate value in hexadecimal
        h = "0x{:02x}".format(i)
        # Write the candidate into r at position j
        r[j] = int(h, 16)
        if debug:
            print("i : {0} ; r : {1}".format(h, r.hex() + Y_n.hex()))
        reponse = oracle.demande_informations(r.hex() + Y_n.hex(), IV.hex())
        # If the server's answer is not "invalid padding", the padding is valid!
        if reponse['status'] == 'invalid padding':
            i = i + 1
        else:
            if debug:
                print("VALIDE : {0} - i={1}".format(reponse['status'], str(i)))
            # Intermediate-state byte: candidate XOR the padding value (16 - j)
            bytes_IS_block[j] = i ^ (16 - j)
            # XOR with the previous ciphertext block to get the plaintext byte
            byte = bytes_IS_block[j] ^ Y_n_before[j]
            byte = hex(byte)
            byte = byte[2:]
            # Zero-pad to two hex digits
            if len(byte) == 1:
                byte = "0" + byte
            print("L'octet {0} de X_n est {1}".format(j, byte))
            #envoie_octet_intermediaire(login, (nb_block * 16 + (16 - j)), byte)
            # Prepend the recovered byte (bytes are found right-to-left)
            bloc = "{0}{1}".format(byte, bloc)
            # Rewrite the tail of r so positions j..15 decrypt to the next
            # padding value, enabling the attack on position j-1
            for k in range(j, 16):
                r[k] = bytes_IS_block[k] ^ (16 - (j - 1))
            # Move to the next (lower) position
            j = j - 1
            # Restart the candidate search at 0
            i = 0
            # All 16 positions done: stop
            if j < 0:
                break
    # Dispose of the oracle
    del oracle
    return bloc
class Main(object):
    """Command-line driver for database migrations.

    Wires together the CLI printer, the database engine adapter (``sgdb``)
    and the migration-file repository (``db_migrate``), then either creates
    a new migration file or migrates the schema up/down to a target version.
    """

    def __init__(self, config=None, mysql=None, db_migrate=None):
        self.cli = CLI()
        self.config = config or {}
        self.sgdb = mysql
        if self.sgdb is None and not self.config.get("new_migration"):
            # FIX: compare strings with '==', not 'is'. Identity checks on
            # str literals only work by accident of CPython interning and
            # made these branches unreliable.
            if self.config.get("db_engine") == 'mysql':
                from mysql import MySQL
                self.sgdb = MySQL(config)
            elif self.config.get("db_engine") == 'oracle':
                from oracle import Oracle
                self.sgdb = Oracle(config)
        self.db_migrate = db_migrate or SimpleDBMigrate(config)

    def execute(self):
        """Run either migration-file creation or a schema migration."""
        self.cli.msg("\nStarting DB migration...", "PINK")
        if self.config.get("new_migration"):
            self.create_migration()
        else:
            self.migrate()
        self.cli.msg("\nDone.\n", "PINK")

    def create_migration(self):
        """Create an empty migration file named from the config."""
        # TODO: create file in the migrations directory, not in current
        new_file = Migration.create(self.config.get("new_migration"))
        self.cli.msg("- Created file '%s'" % (new_file))

    def migrate(self):
        """Migrate the schema from the current version to the destination."""
        destination_version = self.get_destination_version()
        current_version = self.sgdb.get_current_schema_version()
        self.cli.msg("- Current version is: %s" % current_version, "GREEN")
        self.cli.msg("- Destination version is: %s" % destination_version, "GREEN")

        # if current and destination versions are the same,
        # will consider a migration up to execute remaining files
        is_migration_up = True
        if int(current_version) > int(destination_version):
            is_migration_up = False

        # do it!
        self.execute_migrations(current_version, destination_version, is_migration_up)

    def get_destination_version(self):
        """Return the target schema version (explicit or latest available).

        Raises:
            Exception: if the requested version has no migration file.
        """
        destination_version = self.config.get("schema_version")
        if destination_version is None:
            destination_version = self.db_migrate.latest_version_available()
        # FIX: '!=' instead of 'is not' for string comparison (see __init__).
        if destination_version != '0' and not self.db_migrate.check_if_version_exists(destination_version):
            raise Exception("version not found (%s)" % destination_version)
        return destination_version

    def get_migration_files_to_be_executed(self, current_version, destination_version):
        """Return the ordered list of migration versions to execute.

        Raises:
            Exception: when migrating down and a schema version has no
                corresponding migration file.
        """
        schema_versions = self.sgdb.get_all_schema_versions()
        migration_versions = self.db_migrate.get_all_migration_versions()

        # migration up: the easy part
        if current_version <= destination_version:
            remaining_versions_to_execute = Lists.subtract(migration_versions, schema_versions)
            remaining_versions_to_execute = [version for version in remaining_versions_to_execute
                                             if version <= destination_version]
            return remaining_versions_to_execute

        # migration down...
        down_versions = [version for version in schema_versions
                         if version <= current_version and version > destination_version]
        for version in down_versions:
            if version not in migration_versions:
                raise Exception("impossible to migrate down: one of the versions was not found (%s)" % version)
        # down migrations must run newest-first
        down_versions.reverse()
        return down_versions

    def execute_migrations(self, current_version, destination_version, is_migration_up):
        """Execute (or, with show_sql_only, just display) every pending migration."""
        # getting only the migration sql files to be executed
        versions_to_be_executed = self.get_migration_files_to_be_executed(current_version, destination_version)
        if versions_to_be_executed is None or len(versions_to_be_executed) == 0:
            self.cli.msg("\nNothing to do.\n", "PINK")
            return

        up_down_label = is_migration_up and "up" or "down"
        if self.config.get("show_sql_only"):
            self.cli.msg("\nWARNING: database migrations are not being executed ('--showsqlonly' activated)", "YELLOW")
        else:
            self.cli.msg("\nStarting migration %s!" % up_down_label)

        if self.config.get("log_level") >= 1:
            self.cli.msg("*** versions: %s\n" % versions_to_be_executed, "CYAN")

        sql_statements_executed = []
        for migration_version in versions_to_be_executed:
            migration = self.db_migrate.get_migration_from_version_number(migration_version)
            sql = is_migration_up and migration.sql_up or migration.sql_down

            if not self.config.get("show_sql_only"):
                if self.config.get("log_level") >= 1:
                    self.cli.msg("===== executing %s (%s) =====" % (migration.file_name, up_down_label))
                # Only pass an execution logger at verbose log levels
                log = None
                if self.config.get("log_level") >= 2:
                    log = self.cli.msg
                self.sgdb.change(sql, migration_version, is_migration_up, execution_log=log)

                # paused mode
                if self.config.get("paused_mode"):
                    raw_input("* press <enter> to continue... ")

            # recording the last statement executed
            sql_statements_executed.append(sql)

        if self.config.get("show_sql") or self.config.get("show_sql_only"):
            self.cli.msg("__________ SQL statements executed __________", "YELLOW")
            for sql in sql_statements_executed:
                self.cli.msg(sql, "YELLOW")
            self.cli.msg("_____________________________________________", "YELLOW")
class OracleTest(unittest.TestCase):
    """Tests for the Oracle: in-vitro experiment simulation, in-vivo
    execution, and parsing of solver output into experiment outcomes."""

    def setUp(self):
        # Minimal universe: a gene, a protein, two metabolites and a complex,
        # all located in the cytosol
        self.g1 = Gene('g1')
        self.p1 = Protein('p1')
        self.met1 = Metabolite('met1')
        self.met2 = Metabolite('met2')
        self.cplx1 = Complex('cplx1')
        self.cytosol = Cytosol()
        # Presence conditions pairing each entity with the cytosol
        self.cond1 = PresentEntity(self.met1, self.cytosol)
        self.cond2 = PresentEntity(self.met2, self.cytosol)
        self.cond3 = PresentEntity(self.p1, self.cytosol)
        self.cond4 = PresentEntity(self.cplx1, self.cytosol)
        self.growth = Growth('growth', [self.cond2])
        self.r1 = Reaction('r1', [self.cond1], [self.cond2])
        self.r2 = Reaction('r2', [self.cond3], [self.cond4])
        self.entities = [self.g1, self.p1, self.met1, self.met2, self.cplx1]
        self.compartments = [self.cytosol]
        self.activities = [self.growth, self.r1, self.r2]
        self.setup_conds = [self.cond1, self.cond3]
        # Two candidate models sharing setup conditions but differing in reactions
        self.mod1 = Model('m0', self.setup_conds, [self.growth, self.r1], [])
        self.mod2 = Model('m1', self.setup_conds, [self.growth, self.r2], [])
        # Default oracle under test, backed by mod1
        self.oracle = Oracle(None, [], [], self.mod1, self.entities, self.compartments, self.activities)

    def tearDown(self):
        self.oracle = None

    def test_in_vitro_basic(self):
        # A plain reconstruction experiment on a spontaneous reaction succeeds
        met1 = Metabolite('met1')
        met2 = Metabolite('met2')
        cytosol = Cytosol()
        cond1 = PresentEntity(met1, cytosol)
        cond2 = PresentEntity(met2, cytosol)
        r1 = Reaction('r1', [cond1], [cond2])
        self.oracle = Oracle(None, [], [r1], None, [], [], [])
        expD = ExperimentDescription(ReconstructionActivity('r1'), [])
        out = self.oracle.execute_in_vitro_exp(expD)
        self.assertEqual(out.outcome, True)

    def test_in_vitro_enz(self):
        # A reaction requiring a catalyst succeeds when the catalysing enzyme is supplied
        met1 = Metabolite('met1')
        met2 = Metabolite('met2')
        cytosol = Cytosol()
        cond1 = PresentEntity(met1, cytosol)
        cond2 = PresentEntity(met2, cytosol)
        cond_enz = PresentCatalyst(cytosol)
        r1 = Reaction('r1', [cond1, cond_enz], [cond2])
        enz = Protein('p1', properties=[Catalyses(r1)])
        self.oracle = Oracle(None, [enz], [r1], None, [], [], [])
        expD = ExperimentDescription(ReconstructionEnzReaction('r1', 'p1'), [])
        out = self.oracle.execute_in_vitro_exp(expD)
        self.assertEqual(out.outcome, True)

    def test_in_vitro_transp(self):
        # A reaction requiring a transporter succeeds when the transporter is supplied
        met1 = Metabolite('met1')
        met2 = Metabolite('met2')
        cytosol = Cytosol()
        cond1 = PresentEntity(met1, cytosol)
        cond2 = PresentEntity(met2, cytosol)
        cond_trp = PresentTransporter(cytosol)
        r1 = Reaction('r1', [cond1, cond_trp], [cond2])
        transp = Protein('p1', properties=[Transports(r1)])
        self.oracle = Oracle(None, [transp], [r1], None, [], [], [])
        expD = ExperimentDescription(ReconstructionTransporterRequired('r1', 'p1'), [])
        out = self.oracle.execute_in_vitro_exp(expD)
        self.assertEqual(out.outcome, True)

    def test_in_vivo(self):
        # Detecting activity r1 in vivo against the default model succeeds
        expD = ExperimentDescription(DetectionActivity('r1'), [])
        res = self.oracle.execute_in_vivo(expD)
        self.assertEqual(res.outcome, True)

    def test_process_output_ent_detection_1(self):
        # 'synthesizable' in solver output -> entity detected
        expD = ExperimentDescription(DetectionEntity('met1'), [])
        out = 'Answer: 1\nsynthesizable(met1,ver,c_05,m0)'
        res = self.oracle.process_output(out, expD)
        self.assertEqual(res.outcome, True)

    def test_process_output_localisation_ent_1(self):
        # 'synthesizable' in the queried compartment -> entity localised
        expD = ExperimentDescription(LocalisationEntity('met1', 'c_05'), [])
        out = 'Answer: 1\nsynthesizable(met1,ver,c_05,m0)'
        res = self.oracle.process_output(out, expD)
        self.assertEqual(res.outcome, True)

    def test_process_output_ent_detection_2(self):
        # 'initially_present' also counts as detection
        expD = ExperimentDescription(DetectionEntity('met1'), [])
        out = 'Answer: 1\ninitially_present(met1,ver,c_05,m0)'
        res = self.oracle.process_output(out, expD)
        self.assertEqual(res.outcome, True)

    def test_process_output_localisation_ent_2(self):
        # 'initially_present' also counts for localisation
        expD = ExperimentDescription(LocalisationEntity('met1', 'c_05'), [])
        out = 'Answer: 1\ninitially_present(met1,ver,c_05,m0)'
        res = self.oracle.process_output(out, expD)
        self.assertEqual(res.outcome, True)

    def test_process_output_act_detection(self):
        # 'active' in solver output -> activity detected
        expD = ExperimentDescription(DetectionActivity('r1'), [])
        out = 'Answer: 1\nactive(r1,m0)'
        res = self.oracle.process_output(out, expD)
        self.assertEqual(res.outcome, True)

    def test_process_output_adam_two_factor(self):
        # Two-factor experiment prediction parsed from solver output
        expD = ExperimentDescription(AdamTwoFactorExperiment('g1', 'met1'), [])
        out = 'Answer: 1\npredicts(m0,experiment(adam_two_factor_exp,g1,met1),true'
        res = self.oracle.process_output(out, expD)
        self.assertEqual(res.outcome, True)