def __init__(self, config_file=None, input_table=None, **kwargs):
    self.table_config = TableDataSource(config_file=config_file)
    self.input_table = input_table
    self.logic_engine = logic_engine   # module-level Logic instance saved at import time
    Logic(self.logic_engine)           # activate it in the current thread
    super(GraphNode, self).__init__()
def leer_perfil(self, datos):
    str_experto = datos[0]
    str_medico = ""
    if (datos[1] == "si" and datos[2] == "si" and datos[3] == "si"):
        str_medico = "si"
    elif (datos[1] == "si" and datos[2] == "no" and datos[3] == "si"):
        str_medico = "si"
    else:
        str_medico = "no"
    Logic(self.conocimiento)
    try:
        pyDatalog.assert_fact("resp_experto", str_experto)
        pyDatalog.assert_fact("resp_medico", str_medico)
        q = "tipo_usuario(X,Y)"
        rq_3 = pyDatalog.ask(q)
        arr_resp = [rq_3.answers[0][0], rq_3.answers[0][1]]
        return arr_resp
    except:
        print("error")
def assertPatient(self, name, age, sick):
    Logic(self.first)
    # method that adds a patient to the knowledge base
    pyDatalog.assert_fact('age', name, age)
    pyDatalog.assert_fact('sick_of', name, sick)
    tq2 = "sick_of(" + name + ",Y)"
    tq3 = "require_limits_temperature(" + name + ",A,B)"
    tq4 = "require_temperature(" + name + ",A)"
    tq5 = "is_in_stage(" + name + ",A)"
    q2 = pyDatalog.ask(tq2)
    q3 = pyDatalog.ask(tq3)
    q4 = pyDatalog.ask(tq4)
    q5 = pyDatalog.ask(tq5)
    db = self.con.conexion()
    coleccion = db['sensor_temp']
    query = {"activo": "t"}
    data = {
        "$set": {
            "act0_status": "f",
            "act1_status": "f",
            "enfermedad": q2.answers[0][0],
            "temp_min": int(q3.answers[0][1]),
            "temp_max": int(q3.answers[0][0]),
            "temp_pref": int(q4.answers[0][0]),
            "stage": q5.answers[0][0]
        }
    }
    coleccion.update_one(query, data)
    print("mongodb updated")
    return 'saved'
def dump_facts():
    m = Logic(True)
    for v in sorted(m.Db.values(), key=str):
        if v.name[0] in 'abcdefghijklmnopqrstuvwxyz' and '==' not in v.name:
            for c in v.db.values():
                if not c.body:
                    print('+', c.head)
def queen(thread_name):
    n = int(random.random() * 8) + 1  # 1 to 8
    Logic()

    queens(X0) <= (X0._in(range(n)))
    queens(X0,X1) <= queens(X0) & next_queen(X0,X1)
    queens(X0,X1,X2) <= queens(X0,X1) & next_queen(X0,X1,X2)
    queens(X0,X1,X2,X3) <= queens(X0,X1,X2) & next_queen(X0,X1,X2,X3)
    queens(X0,X1,X2,X3,X4) <= queens(X0,X1,X2,X3) & next_queen(X0,X1,X2,X3,X4)
    queens(X0,X1,X2,X3,X4,X5) <= queens(X0,X1,X2,X3,X4) & next_queen(X0,X1,X2,X3,X4,X5)
    queens(X0,X1,X2,X3,X4,X5,X6) <= queens(X0,X1,X2,X3,X4,X5) & next_queen(X0,X1,X2,X3,X4,X5,X6)
    queens(X0,X1,X2,X3,X4,X5,X6,X7) <= queens(X0,X1,X2,X3,X4,X5,X6) & next_queen(X0,X1,X2,X3,X4,X5,X6,X7)

    next_queen(X0,X1) <= queens(X1) & ok(X0,1,X1)
    next_queen(X0,X1,X2) <= next_queen(X1,X2) & ok(X0,2,X2)
    next_queen(X0,X1,X2,X3) <= next_queen(X1,X2,X3) & ok(X0,3,X3)
    next_queen(X0,X1,X2,X3,X4) <= next_queen(X1,X2,X3,X4) & ok(X0,4,X4)
    next_queen(X0,X1,X2,X3,X4,X5) <= next_queen(X1,X2,X3,X4,X5) & ok(X0,5,X5)
    next_queen(X0,X1,X2,X3,X4,X5,X6) <= next_queen(X1,X2,X3,X4,X5,X6) & ok(X0,6,X6)
    next_queen(X0,X1,X2,X3,X4,X5,X6,X7) <= next_queen(X1,X2,X3,X4,X5,X6,X7) & ok(X0,7,X7)

    query = pyDatalog.ask("queens(%s)" % (",".join("X%s" % i for i in range(n))))
    answers = query.answers if query else []
    result = "OK" if len(answers) == [1, 0, 0, 2, 10, 4, 40, 92][n - 1] else "* not OK ! *"
    print("%s : n = %d %s " % (thread_name, n, result))
def __init__(self):
    print('Knowledge base')
    self.conocimiento = Logic(True)
    pyDatalog.load("""
        + usar_computadora('experto','si')
        + usar_computadora('inexperto','no')
        es_experto(Y) <= usar_computadora(Y,Z) & resp_experto(Z)

        # Is a doctor
        + terminologia('medico','si')
        + pacientes('medico','si')
        + recetar('medico','si')

        # Is not a doctor
        + terminologia('persona','no')
        + pacientes('persona','no')
        + recetar('persona','no')

        es_medico(X) <= terminologia(X,A) & pacientes(X,A) & recetar(X,A) & resp_medico(A)
        tipo_usuario(X,Y) <= es_medico(X) & es_experto(Y)
    """)
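# A minimal usage sketch (assumed driver code; 'kb' stands for an instance of the
# class whose __init__ appears above): activate the saved clause set, assert the
# questionnaire answers the rules expect, and read back the inferred user type,
# mirroring leer_perfil() earlier in this section.
from pyDatalog import pyDatalog, Logic

Logic(kb.conocimiento)                       # query against the saved knowledge base
pyDatalog.assert_fact('resp_experto', 'si')
pyDatalog.assert_fact('resp_medico', 'si')
answer = pyDatalog.ask('tipo_usuario(X,Y)')
print(answer.answers if answer else 'no inference')   # e.g. [('medico', 'experto')]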
def dump_all():
    m = Logic(True)
    for v in sorted(m.Db.values(), key=str):
        for c in v.db.values():
            if not c.body:
                print('+', c.head)
            else:
                print(c.head, '<=', c.body)
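# A small sketch (assumed standalone usage) of what dump_facts()/dump_all() print:
# Logic(True) snapshots the clause set of the current thread, so facts show up as
# '+ head' lines and rules as 'head <= body' lines (output format is approximate).
from pyDatalog import pyDatalog, Logic

Logic()                                   # start from an empty clause set
pyDatalog.load("""
    + parent('alice', 'bob')
    ancestor(X, Y) <= parent(X, Y)
""")
dump_facts()   # roughly: + parent(alice,bob)
dump_all()     # additionally lists the ancestor rule with its body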
def readPatient(self, name):
    Logic(self.first)
    # method that queries the data of the patient currently in the knowledge base
    returnvalue = ""
    try:
        tq1 = "age(" + name + ",Y)"
        tq2 = "sick_of(" + name + ",Y)"
        q1 = pyDatalog.ask(tq1)
        q2 = pyDatalog.ask(tq2)
        returnvalue = "{'name':'" + name + "','age':'" + q1.answers[0][0] + \
                      "','sick':'" + q2.answers[0][0] + "'}"
    except:
        returnvalue = "NOPATIENT"
    return returnvalue
def retractPatient(self, name):
    Logic(self.first)
    # Method to forget the patient: retrieves its data from the knowledge base and removes it
    returnvalue = ''
    try:
        tq1 = "age(" + name + ",Y)"
        tq2 = "sick_of(" + name + ",Y)"
        q1 = pyDatalog.ask(tq1)
        q2 = pyDatalog.ask(tq2)
        pyDatalog.retract_fact('age', name, q1.answers[0][0])
        pyDatalog.retract_fact('sick_of', name, q2.answers[0][0])
        return 'retracted'
    except:
        returnvalue = "NOPATIENT"
    return returnvalue
def createAttackScenarioLogic(currentAttackScenario):
    # for currentAttackScenario in attackScenarios:
    # Clear all vulnerabilities and add only those for a specific attack
    Logic()
    setup()
    # serviceTuple is a tuple of services on the attack path
    serviceTuple = currentAttackScenario[0]
    # we have to add the final (target) service to the end of the path
    serviceTuple = serviceTuple + (targetService,)
    # vulnerabilityTuple is a tuple of the vulnerabilities exploited along the attack path
    vulnerabilityTuple = currentAttackScenario[1]
    # print("Creating Attack Scenario...")
    for s, v in zip(serviceTuple, vulnerabilityTuple):
        if v != "legitimate":
            # print("Adding Fact: " + v + "(" + s + ",0)")
            pyDatalog.assert_fact(v, s, 0)
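# An illustrative call (the service and vulnerability names, and the globals
# setup()/targetService used above, are assumptions): each service on the path is
# paired with the exploit used against it, and 'legitimate' steps add no fact.
scenario = (('webServer', 'appServer'),
            ('remoteRootExploit', 'localRootExploit', 'legitimate'))
createAttackScenarioLogic(scenario)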
def retractpreferences(self):
    Logic(self.first)
    returnvalue = ''
    try:
        db = self.con.conexion()
        coleccion = db['sensor_temp']
        query = {"activo": "t"}
        req = list(coleccion.find(query))[0]
        tq1 = "prefer_temperature(" + req['enfermedad'] + "," + req['stage'] + ",X)"
        q1 = pyDatalog.ask(tq1)
        pyDatalog.retract_fact('prefer_temperature', req['enfermedad'],
                               req['stage'], q1.answers[0][0])
        pyDatalog.assert_fact('prefer_temperature', req['enfermedad'],
                              req['stage'], req['temp_pref'])
        return 'retracted'
    except:
        returnvalue = "NOPATIENT"
    return returnvalue
def queen(thread_name, logic8, logic, n):
    print("start %s" % thread_name)
    Logic(logic8)
    check_logic(thread_name, 8)
    Logic(logic)
    check_logic(thread_name, n)
    # (tail of check_logic)
    result = "OK" if len(answers) == [1, 0, 0, 2, 10, 4, 40, 92][n - 1] else answers
    print("%s : n = %d %s " % (thread_name, n, result))

def queen(thread_name, logic8, logic, n):
    print("start %s" % thread_name)
    Logic(logic8)
    check_logic(thread_name, 8)
    Logic(logic)
    check_logic(thread_name, n)

# create queen resolution logic for N = 1 to 8
logic = []
for i in range(8):
    logic.append(Logic())
    add_logic(i + 1)

Logic(logic[7])
check_logic("Main", 8)
Logic(logic[4])
check_logic("Main", 5)

# start 20 threads, each with a randomly-chosen logic
for i in range(20):
    n = int(random.random() * 8) + 1  # 1 to 8
    t = threading.Thread(target=queen,
                         args=("thread %02d" % i, logic[7], logic[n - 1], n))
    t.start()
def run(self):
    Logic(self.logic)
    loadBigString(self.loadStr, self.threadID)
from pyDatalog import pyDatalog
from pyDatalog import Logic
import logging
from pyDatalog import pyEngine
pyEngine.Trace = True
import copy
import itertools
import gc

# logging.basicConfig(level=logging.DEBUG)
Logic()

# pyDatalog.create_terms('a','b','c','d','isTrue','equal','X','Y','Z')
# equal(X,Y) <= equal(Y,X)
# equal(X,Z) <= equal(X,Y) & equal(Y,Z)
# + equal('a','b')
# + equal('b','c')
# myAnswer = pyDatalog.ask('equal(X,Y)')
# print str(myAnswer.answers)

pyDatalog.create_terms(
    'parent,child,grandparent,X,Y,Z,Alice,Bob,Charlie,David,Eve')
grandparent(X, Z) <= parent(X, Y) & parent(Y, Z)
+parent('Alice', 'Bob')
+parent('Bob', 'Charlie')
+parent('Charlie', 'David')
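# A short follow-up query (not in the original snippet): with the three parent
# facts above, the grandparent rule should yield Alice->Charlie and Bob->David.
answer = pyDatalog.ask('grandparent(X, Z)')
print(answer.answers if answer else [])   # e.g. [('Alice', 'Charlie'), ('Bob', 'David')]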
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 7 18:01:54 2019

@author: Moi
"""
from pyDatalog import pyDatalog, Logic

Logic()  # initializes the pyDatalog engine
Logic()  # creates an empty set of clauses for use in the current thread

# add first set of clauses here
first = Logic(True)  # save the current set of clauses in variable 'first'

Logic()  # first is not affected by this statement
# define the second set of clauses here
second = Logic(True)  # save it for later use

Logic(first)  # now use first in the current thread
# queries will now run against the first set of rules
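# A concrete sketch of the pattern above (the fact names are illustrative
# assumptions): each saved Logic keeps its own clauses, and Logic(saved) switches
# which clause set the current thread queries.
from pyDatalog import pyDatalog, Logic

Logic()                                       # empty clause set
pyDatalog.assert_fact('color', 'sky', 'blue')
first = Logic(True)                           # save the set holding the 'sky' fact

Logic()                                       # fresh, empty clause set
pyDatalog.assert_fact('color', 'grass', 'green')
second = Logic(True)

Logic(first)
print(pyDatalog.ask('color(X, Y)').answers)   # expected: only the 'sky' fact
Logic(second)
print(pyDatalog.ask('color(X, Y)').answers)   # expected: only the 'grass' fact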
def _check_instance(self):
    if id(self) != id(Rota.last_instance):
        Rota.last_instance._logic = Logic(True)
        Rota.last_instance = self
        Logic(self._logic)
def __init__(self, conexion):
    print('KB loaded')
    self.con = conexion
    self.first = Logic(True)
    pyDatalog.load("""
        # Rules for inferring the stage of life from age
        # Childhood
        + stage_life('0','childhood')
        + stage_life('1','childhood')
        + stage_life('2','childhood')
        + stage_life('3','childhood')
        + stage_life('4','childhood')
        + stage_life('5','childhood')
        + stage_life('6','childhood')
        + stage_life('7','childhood')
        + stage_life('8','childhood')
        + stage_life('9','childhood')
        + stage_life('10','childhood')
        + stage_life('11','childhood')
        + stage_life('12','childhood')
        + stage_life('13','childhood')
        + stage_life('14','childhood')
        + stage_life('15','childhood')
        + stage_life('16','childhood')
        + stage_life('17','childhood')
        # Adulthood
        + stage_life('18','adulthood')
        + stage_life('19','adulthood')
        + stage_life('20','adulthood')
        + stage_life('21','adulthood')
        + stage_life('22','adulthood')
        + stage_life('23','adulthood')
        + stage_life('24','adulthood')
        + stage_life('25','adulthood')
        + stage_life('26','adulthood')
        + stage_life('27','adulthood')
        + stage_life('28','adulthood')
        + stage_life('29','adulthood')
        + stage_life('30','adulthood')
        + stage_life('31','adulthood')
        + stage_life('32','adulthood')
        + stage_life('33','adulthood')
        + stage_life('34','adulthood')
        + stage_life('35','adulthood')
        + stage_life('36','adulthood')
        + stage_life('37','adulthood')
        + stage_life('38','adulthood')
        + stage_life('39','adulthood')
        + stage_life('40','adulthood')
        + stage_life('41','adulthood')
        + stage_life('42','adulthood')
        + stage_life('43','adulthood')
        + stage_life('44','adulthood')
        + stage_life('45','adulthood')
        + stage_life('46','adulthood')
        + stage_life('47','adulthood')
        + stage_life('48','adulthood')
        + stage_life('49','adulthood')
        + stage_life('50','adulthood')
        + stage_life('51','adulthood')
        + stage_life('52','adulthood')
        + stage_life('53','adulthood')
        + stage_life('54','adulthood')
        + stage_life('55','adulthood')
        + stage_life('56','adulthood')
        + stage_life('57','adulthood')
        + stage_life('58','adulthood')
        + stage_life('59','adulthood')
        # Senior
        + stage_life('60','senior')
        + stage_life('61','senior')
        + stage_life('62','senior')
        + stage_life('63','senior')
        + stage_life('64','senior')
        + stage_life('65','senior')
        + stage_life('66','senior')
        + stage_life('67','senior')
        + stage_life('68','senior')
        + stage_life('69','senior')
        + stage_life('70','senior')
        + stage_life('71','senior')
        + stage_life('72','senior')
        + stage_life('73','senior')
        + stage_life('74','senior')
        + stage_life('75','senior')
        + stage_life('76','senior')
        + stage_life('77','senior')
        + stage_life('78','senior')
        + stage_life('79','senior')
        + stage_life('80','senior')
        + stage_life('81','senior')
        + stage_life('82','senior')
        + stage_life('83','senior')
        + stage_life('84','senior')
        + stage_life('85','senior')
        + stage_life('86','senior')
        + stage_life('87','senior')
        + stage_life('88','senior')
        + stage_life('89','senior')
        + stage_life('90','senior')
        + stage_life('91','senior')
        + stage_life('92','senior')
        + stage_life('93','senior')
        + stage_life('94','senior')
        + stage_life('95','senior')
        + stage_life('96','senior')
        + stage_life('97','senior')
        + stage_life('98','senior')
        + stage_life('99','senior')
        + stage_life('100','senior')

        # Rule to infer a person's life stage
        is_in_stage(X,Z) <= age(X,Y) & stage_life(Y,Z)

        # Temperature limits
        # Respiratory
        + has_limits('respiratory','childhood','26','25')
        + has_limits('respiratory','adulthood','27','24')
        + has_limits('respiratory','senior','27','24')
        # Heatstroke and similar
        + has_limits('heatstroke','childhood','25','22')
        + has_limits('heatstroke','adulthood','25','21')
        + has_limits('heatstroke','senior','26','22')
        # Hypothermia and similar
        + has_limits('hypothermia','childhood','29','25')
        + has_limits('hypothermia','adulthood','30','24')
        + has_limits('hypothermia','senior','30','25')
        # Other illnesses
        + has_limits('other','childhood','27','25')
        + has_limits('other','adulthood','28','24')
        + has_limits('other','senior','28','24')

        # Previously preferred temperature
        # Respiratory
        + prefer_temperature('respiratory','childhood','24')
        + prefer_temperature('respiratory','adulthood','25')
        + prefer_temperature('respiratory','senior','26')
        # Heatstroke and similar
        + prefer_temperature('heatstroke','childhood','22')
        + prefer_temperature('heatstroke','adulthood','23')
        + prefer_temperature('heatstroke','senior','25')
        # Hypothermia and similar
        + prefer_temperature('hypothermia','childhood','28')
        + prefer_temperature('hypothermia','adulthood','27')
        + prefer_temperature('hypothermia','senior','28')
        # Other illnesses
        + prefer_temperature('other','childhood','25')
        + prefer_temperature('other','adulthood','27')
        + prefer_temperature('other','senior','27')

        # Rules for inferring temperature settings
        require_temperature(X,A) <= is_in_stage(X,Y) & sick_of(X,Z) & prefer_temperature(Z,Y,A)
        require_limits_temperature(X,A,B) <= is_in_stage(X,Y) & sick_of(X,Z) & has_limits(Z,Y,A,B)
    """)
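# A small query sketch (assumed driver code; 'kb' and the patient name 'ana' are
# illustrative): after activating the knowledge base above and asserting a
# patient's age and illness, the rules infer the life stage, the preferred
# temperature, and the allowed limits.
Logic(kb.first)                                  # 'kb' is a hypothetical instance of the class above
pyDatalog.assert_fact('age', 'ana', '35')
pyDatalog.assert_fact('sick_of', 'ana', 'respiratory')
print(pyDatalog.ask('is_in_stage(ana,S)').answers)                    # e.g. [('adulthood',)]
print(pyDatalog.ask('require_temperature(ana,T)').answers)            # e.g. [('25',)]
print(pyDatalog.ask('require_limits_temperature(ana,A,B)').answers)   # e.g. [('27', '24')]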
def _new_instance(self):
    if Rota.last_instance is not None:
        Rota.last_instance._logic = Logic(True)
    Logic()
    Rota.last_instance = self
def run(self):
    # threadLock.acquire()
    Logic(self.logic)
    loadBigString(self.loadStr, self.threadID)
from pyDatalog import pyDatalog
from pyDatalog import Logic
import logging
from pyDatalog import pyEngine
pyEngine.Trace = True
import copy
import itertools
import gc
import sys
from operator import itemgetter
import pprint
import time

# This is the risk metric
Logic()

pyDatalog.create_terms('connectsTo,residesOn,runs,TargetHost,SourceHost,DestHost,TargetService,SourceService,compromised,connectsToWithPrivileges,questionableWithinRisk,functionQuestionableWithinRisk','allAttackerPathsCostPlus','F','F2','FuncName','U','U2','Util','allAttackerPathsCostPlus','SS','TS','IS1','functionDown','functionalityFree','Prob')
pyDatalog.create_terms('cTo,cToWithPrivileges,ServiceA,ServiceB,HostA,HostB,localRootExploit,remoteRootExploit,attackerConnectsToWithPrivileges,attackerReachable')
pyDatalog.create_terms('allPaths,allAttackerPaths,P,P2,IntermediateService1,attackerCanReachOneStep,ok,attackerCanReachTwoSteps,oneStepToBadness,twoStepsToBadness','shortestAttackerPathsPlus')
pyDatalog.create_terms('requires,Task,Hostname','remoteUserExploit','vulnExists','RiskForFunction','MaxR','OtherService','functionDownOrCompromised','probCompromised')
pyDatalog.create_terms('cutConnection','VulnType','isAccount','C','C2','cost','TotalC','TotalC2','E','E2','notConnectsTo','notResidesOn','notCompromised','notRemoteUserExploit','notRemoteRootExploit','notLocalRootExploit','a','b','c','suspicious','t1','t2','t3','t4','t5','TacticNumber','moveHostTo','transitiveConnects','transitiveConnectsSecure')
pyDatalog.create_terms('TestA','TestB','utility','FunctionA','resultingUtil','functionCompromised','functionUncompromised','FuncAUtil','allConnectionPaths','questionable','functionQuestionable','U','requiresConnection','networkConnectsTo','adHost','missingConnection','isType','allAttackerPathsWithTyping','ExploitAndTarget','ExploitAndTarget2','TargetType','questionableAtRisk','allAttackerPathsPlus','functionQuestionableWithinRiskPlus')
pyDatalog.create_terms('Functionality','Attribute','Data','Service','Impact','requiresSecurityAttribute','FunctionB','FunctionC','functionRequires','implements','implementedF','requiresAllConnections')
pyDatalog.create_terms('isType','validNewConnectsTo')
pyDatalog.create_terms('vulnExistsWithAttributes','remoteRootExploitWithAttributes','compromisedWithAttributes','functionCompromisedWithAttributes')
pyDatalog.create_terms('requiresSecurityAttribute','consumesDataWithAttributes','transitiveConnectsWithAttributes','producesData','requiresDataWithAttributes','COK','IOK','AOK','CRequired','IRequired','ARequired','CImpact','IImpact','AImpact')
pyDatalog.create_terms('CProvided','IProvided','AProvided','CProvided1','IProvided1','AProvided1','CProvided2','IProvided2','AProvided2','connectsToWithAttributes','consumesData','networkConnectsToWithAttributes','requiresFunction','transitiveConnectsWithAttributesOnPath')
pyDatalog.create_terms('consumesDataWithC','consumesDataWithI','consumesDataWithA','consumesDataWithAttributeProblems','consumesDataWithAttributesNoAlternative','allCompromised','someCompromised','attackPaths','pathCompromisesFunctionWithCost','pathCompromisesService')
pyDatalog.create_terms('isPath','X','Y','Z','pathCompromisesUtilities','pathCompromisesWithCost','worstCasePath','UtilPathPair','pathCompromisesFunctions','FList','worstCasePathValue','weightedWorstCastPath','probCapability','estimatedUtility','worstCasePathFromSource','SourceCost','compromisedCombo')

# Logic for Below Cases
@pyDatalog.predicate()
from pyDatalog import pyDatalog
from pyDatalog import Logic
import logging
from pyDatalog import pyEngine
pyEngine.Trace = True
import copy
import itertools
import gc
import sys
from operator import itemgetter
import pprint
import time
import csv

# This is the risk metric
Logic()

pyDatalog.create_terms('connectsTo,residesOn,runs,TargetHost,SourceHost,DestHost,TargetService,SourceService,compromised,connectsToWithPrivileges,questionableWithinRisk,functionQuestionableWithinRisk','allAttackerPathsCostPlus','F','F2','FuncName','U','U2','Util','allAttackerPathsCostPlus','SS','TS','IS1','functionDown','functionalityFree','Prob')
pyDatalog.create_terms('cTo,cToWithPrivileges,ServiceA,ServiceB,HostA,HostB,localRootExploit,remoteRootExploit,attackerConnectsToWithPrivileges,attackerReachable')
pyDatalog.create_terms('allPaths,allAttackerPaths,P,P2,IntermediateService1,attackerCanReachOneStep,ok,attackerCanReachTwoSteps,oneStepToBadness,twoStepsToBadness','shortestAttackerPathsPlus')
pyDatalog.create_terms('requires,Task,Hostname','remoteUserExploit','vulnExists','RiskForFunction','MaxR','OtherService','functionDownOrCompromised','probCompromised')
pyDatalog.create_terms('cutConnection','VulnType','isAccount','C','C2','cost','TotalC','TotalC2','E','E2','notConnectsTo','notResidesOn','notCompromised','notRemoteUserExploit','notRemoteRootExploit','notLocalRootExploit','a','b','c','suspicious','t1','t2','t3','t4','t5','TacticNumber','moveHostTo','transitiveConnects','transitiveConnectsSecure')
pyDatalog.create_terms('TestA','TestB','utility','FunctionA','resultingUtil','functionCompromised','functionUncompromised','FuncAUtil','allConnectionPaths','questionable','functionQuestionable','U','requiresConnection','networkConnectsTo','adHost','missingConnection','isType','allAttackerPathsWithTyping','ExploitAndTarget','ExploitAndTarget2','TargetType','questionableAtRisk','allAttackerPathsPlus','functionQuestionableWithinRiskPlus')
pyDatalog.create_terms('Functionality','Attribute','Data','Service','Impact','requiresSecurityAttribute','FunctionB','FunctionC','functionRequires','implements','implementedF','requiresAllConnections')
pyDatalog.create_terms('isType','validNewConnectsTo')
pyDatalog.create_terms('vulnExistsWithAttributes','remoteRootExploitWithAttributes','componentCompromisedWithAttributes','functionCompromisedWithAttributes')
pyDatalog.create_terms('requiresSecurityAttribute','consumesDataWithAttributes','transitiveConnectsWithAttributes','producesData','requiresDataWithAttributes','COK','IOK','AOK','CRequired','IRequired','ARequired','CImpact','IImpact','AImpact')
pyDatalog.create_terms('CProvided','IProvided','AProvided','CProvided1','IProvided1','AProvided1','CProvided2','IProvided2','AProvided2','connectsToWithAttributes','consumesData','networkConnectsToWithAttributes','requiresFunction','transitiveConnectsWithAttributesOnPath')
pyDatalog.create_terms('consumesDataWithC','consumesDataWithI','consumesDataWithA','consumesDataWithAttributeProblems','consumesDataWithAttributesNoAlternative','allCompromised','someCompromised','attackPaths','pathCompromisesFunctionWithCost','pathCompromisesService')
pyDatalog.create_terms('isPath','X','Y','Z','pathCompromisesUtilities','pathCompromisesWithCost','worstCasePath','UtilPathPair','pathCompromisesFunctions','FList','worstCasePathValue','weightedWorstCastPath','probCapability','estimatedUtility','worstCasePathFromSource','SourceCost','compromisedCombo')
pyDatalog.create_terms('consumesDataOnlyGoodPath','noIdealConsumption','transitiveConnectsWithAttributesOnPathUnderAttack','consumesDataWithCUnderAttack','consumesDataWithIUnderAttack','consumesDataWithAUnderAttack','consumesDataWithAttributesUnderAttack','UMod')
pyDatalog.create_terms('consumeseDataWithModifiedUtilityUnderAttack','pC','isSubType','isTypeOrSubType','isTypeOrSuperType','ComponentType','isVulnerable','existsExploit','Paths','Paths2','Exploits','AttackerMove','AttackerMoves','hasCredential','transitiveConnectsPath','consumesPath')
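# A minimal smoke test (fact values are illustrative): once the terms above exist,
# facts can be asserted and queried with the in-line syntax before any rules are
# loaded.
+connectsTo('webService', 'dbService')
print(connectsTo(X, Y))    # lists the single asserted connection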
def loadDBRels():
    words = {}
    langs = {}
    derivedLoadStr = ""
    etymologicallyLoadStr = ""
    etymologically_relatedLoadStr = ""
    etymologyLoadStr = ""
    has_derived_formLoadStr = ""
    variantLoadStr = ""
    loadStrs = ["", "", "", "", "", ""]
    loadStrsNums = [0, 0, 0, 0, 0, 0]
    numWords = 0
    threads = []
    saved = 0
    # with open("../etymwn/etymwn.tsv") as f:
    with open("../etymwn/summaryDB.tsv") as f:
        i = 0
        for l in f:
            tmp = l.split("\t")
            rel = tmp[1]
            if (rel == 'rel:etymological_origin_of' or rel == 'rel:is_derived_from'):
                # print(rel)
                continue
            lft = tmp[0]
            rgt = tmp[2]
            tmpLft = lft.split(': ')
            langLft, wordLft = tmpLft[0], tmpLft[1]
            tmpRgt = rgt.split(': ')
            langRgt, wordRgt = tmpRgt[0], tmpRgt[1].split("\n")[0]
            try:
                u = langs[langLft]
            except KeyError:
                langs[langLft] = langLft
            try:
                u = langs[langRgt]
            except KeyError:
                langs[langRgt] = langRgt
            try:
                u = words[wordLft]
                saved += 1
            except KeyError:
                words[wordLft] = (i, 1)
            try:
                u = words[wordRgt]
                saved += 1
            except KeyError:
                words[wordRgt] = (i, 2)
            # hand any relation buffer that has grown too large to a worker thread
            for k in range(len(loadStrs)):
                if (loadStrsNums[k] > 15000):
                    th = myThread(k, loadStrs[k], Logic(True))
                    th.start()
                    threads.append(th)
                    print(len(loadStrs[k].split("\n")), "remaining of", loadStrs[k].split("(")[0])
                    # load(loadStrs[k])
                    loadStrs[k] = ""
                    loadStrsNums[k] = 0
            if (wordRgt == 'anchor' or wordLft == 'anchor'):
                print(rel, "(", wordLft, ",", wordRgt, ")")
            # print("boop:", len(langs), len(words))
            # print("beep:", languages.index(langLft), words[wordLft], languages.index(langRgt), words[wordRgt], rel)
            langLftIdx = langs[langLft]
            langRgtIdx = langs[langRgt]
            if (rel == 'rel:derived'):
                # loadStrs[0] += "derived("+str(langLftIdx)+", "+str(words[wordLft])+", "+str(langRgtIdx)+", "+str(words[wordRgt])+")\n"
                # loadStrsNums[0] += 1
                + derived(str(langLftIdx), str(words[wordLft]), str(langRgtIdx), str(words[wordRgt]))
            elif (rel == 'rel:etymologically'):
                # loadStrs[1] += "etymologically("+str(langLftIdx)+", "+str(words[wordLft])+", "+str(langRgtIdx)+", "+str(words[wordRgt])+")\n"
                # loadStrsNums[1] += 1
                + etymologically(str(langLftIdx), str(words[wordLft]), str(langRgtIdx), str(words[wordRgt]))
            elif (rel == 'rel:etymologically_related'):
                # loadStrs[2] += "etymologically_related("+str(langLftIdx)+", "+str(words[wordLft])+", "+str(langRgtIdx)+", "+str(words[wordRgt])+")\n"
                # loadStrsNums[2] += 1
                + etymologically_related(str(langLftIdx), str(words[wordLft]), str(langRgtIdx), str(words[wordRgt]))
            elif (rel == 'rel:etymology'):
                # loadStrs[3] += "etymology("+str(langLftIdx)+", "+str(words[wordLft])+", "+str(langRgtIdx)+", "+str(words[wordRgt])+")\n"
                # loadStrsNums[3] += 1
                + etymology(str(langLftIdx), str(words[wordLft]), str(langRgtIdx), str(words[wordRgt]))
            elif (rel == 'rel:has_derived_form'):
                # loadStrs[4] += "has_derived_form("+str(langLftIdx)+", "+str(words[wordLft])+", "+str(langRgtIdx)+", "+str(words[wordRgt])+")\n"
                # loadStrsNums[4] += 1
                + has_derived_form(str(langLftIdx), str(words[wordLft]), str(langRgtIdx), str(words[wordRgt]))
            elif (rel.find('rel:variant') > -1):
                # loadStrs[5] += "variant("+str(langLftIdx)+", "+str(words[wordLft])+", "+str(langRgtIdx)+", "+str(words[wordRgt])+")\n"
                # loadStrsNums[5] += 1
                + variant(str(langLftIdx), str(words[wordLft]), str(langRgtIdx), str(words[wordRgt]))
            i += 1
            if i % 100000 == 0:
                print("beep:", langs[langLft], words[wordLft], langs[langRgt], words[wordRgt], rel, len(words), len(langs))
                print(i)
            # if i > 300000:
            #     break
    # flush whatever remains in the buffers
    for k in range(len(loadStrs)):
        load(loadStrs[k])
        print(len(loadStrs[k].split("\n")), "remaining of", loadStrs[k].split("(")[0])
        loadStrs[k] = ""
        loadStrsNums[k] = 0
    print("dbSize:", i, "lenWords:", len(words), "saved:", saved)
    print("Now waiting to join:", time.strftime("%H:%M:%S"))
    print()
    for t in threads:
        t.join()
        print("joined a thread:", time.strftime("%H:%M:%S"))
    print("Joined every thread:", time.strftime("%H:%M:%S"))
    return words, langs
def createEnvironment(instanceFile):
    setup(instanceFile)
    noVulnLogic = Logic(True)
    Logic()
    setup(instanceFile)
def _save_new_logic(self):
    self._logic = Logic()
def _restore_saved_logic(self):
    Logic(self._logic)
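# A usage sketch of the save/restore pattern above (the RuleSet class and its fact
# are assumptions, mirroring the _save_new_logic/_restore_saved_logic helpers):
# each object keeps its own clause set and re-activates it before querying, so
# several independent rule bases can coexist in one process.
from pyDatalog import pyDatalog, Logic

class RuleSet:                       # hypothetical holder
    def __init__(self):
        Logic()                      # fresh, empty clause set for this object
        pyDatalog.assert_fact('owner', 'rules', id(self))
        self._logic = Logic(True)    # save it

    def ask(self, query):
        Logic(self._logic)           # restore before querying
        return pyDatalog.ask(query)

a, b = RuleSet(), RuleSet()
print(a.ask('owner(rules, X)').answers)   # each instance sees only its own fact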
assertion_without_column(X0) <= (is_property_assertion(X0) & ~assertion_has_column(X0, Y0))
assertion_without_entity_type(X0) <= (is_assertion(X0) & ~assertion_has_entity_type(X0, Y0))

# relationship source name and target name properties
relationship_has_source_name_property(X0, X1) <= (
    is_relationship_assertion(X0) & is_property(X1) & is_column(Y0) &
    assertion_has_source_entity_name_column(X0, Y0) & is_name_assertion(Y1) &
    assertion_has_property_column(Y1, Y0) & column_has_property_type(Y0, X1))

logic_engine = Logic(True)


def flatten(nested_thing):
    if not isinstance(nested_thing, (list, tuple, set)):
        yield nested_thing
    else:
        for thing in nested_thing:
            for i in flatten(thing):
                yield i


class AmbiguityException(Exception):
    pass
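# A quick check of flatten() (illustrative values): it yields the leaves of
# arbitrarily nested lists and tuples in order.
print(list(flatten([1, (2, [3, 4]), 5])))   # [1, 2, 3, 4, 5]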