示例#1
0
    def clean_dirs(self, argDict):
        """Remove stale artifacts left over from previous runs.

        Deletes the rogue IR database file (./IR.db) if present and wipes
        the c4_home scratch directory under the configured C4_HOME_PATH.

        :param argDict: dict of run arguments; must map "settings" to the
                        path of the settings file.
        :raises AssertionError: if the configured C4_HOME_PATH does not exist.
        :raises ConfigParser.NoOptionError: if 'C4_HOME_PATH' is not set
                (re-raised after logging).
        """

        # ---------------------- #
        # remove rogue IR files
        # NOTE: os.path.exists() does NOT expand shell globs, so the old
        # check on "./IR*" never matched. Test the concrete file we
        # actually delete instead.

        if os.path.exists("./IR.db"):
            logging.debug("rm -rf ./IR.db")
            os.system("rm -rf ./IR.db")

        # ---------------------- #
        # clear c4 tmp files

        try:
            C4_HOME_PATH = tools.getConfig(argDict["settings"], "DEFAULT",
                                           "C4_HOME_PATH", str)

            # for safety: normalize the configured path before wiping
            # anything underneath it.
            C4_HOME_PATH = C4_HOME_PATH.replace("/c4_home", "")
            C4_HOME_PATH = C4_HOME_PATH.replace("//", "")

            # explicit check instead of 'assert' so the guard survives
            # python -O (asserts are stripped under optimization).
            if not os.path.isdir(C4_HOME_PATH):
                raise AssertionError(C4_HOME_PATH + " does not exist.")
            os.system("rm -rf " + C4_HOME_PATH + "/c4_home/*")

        except ConfigParser.NoOptionError as e:
            logging.info(
                "  FATAL ERROR : option 'C4_HOME_PATH' not set in settings file '"
                + argDict["settings"] + "'. aborting.")
            raise e
示例#2
0
    def __init__(self, argDict=None, orik_rgg=None):
        """Initialize the pycosat-backed solver.

        :param argDict: dict of run arguments; must contain "settings"
                        (path to the settings file) when orik_rgg is given.
                        Defaults to a fresh empty dict.
        :param orik_rgg: provenance graph to convert into CNF formulas,
                         or None to build an empty solver shell.
        """

        # avoid the shared-mutable-default pitfall: a literal {} default
        # would be one dict object shared by every instance.
        self.argDict = {} if argDict is None else argDict
        self.solver_type = "pycosat"

        # nothing to solve yet -- leave the formula lists unbuilt.
        if not self.argDict and orik_rgg is None:
            return

        # --------------------------------------------------------------- #
        # get configuration params

        try:
            self.POS_FACTS_ONLY = tools.getConfig( self.argDict[ "settings" ], \
                                                   "DEFAULT", \
                                                   "POS_FACTS_ONLY", \
                                                   bool )
        except ConfigParser.NoOptionError:
            # default to positive facts only and warn the operator.
            self.POS_FACTS_ONLY = True
            logging.warning( "WARNING : no 'POS_FACTS_ONLY' defined in 'DEFAULT' section of " + \
                           self.argDict[ "settings" ] + "...running with POS_FACTS_ONLY==" + \
                           str( self.POS_FACTS_ONLY ) )

        # --------------------------------------------------------------- #
        # need a way to remove duplicates

        self.boolean_fmla_list = self.orik_rgg_to_fmla_list(orik_rgg)
        self.cnf_fmla_list = self.boolean_fmla_list_to_cnf_fmla_list()

        logging.debug( "  PYCOSAT SOLVER __INIT__ : self.boolean_fmla_list = " + \
                       str( self.boolean_fmla_list ) )
        logging.debug( "  PYCOSAT SOLVER __INIT__ : self.cnf_fmla_list = " + \
                       str( self.cnf_fmla_list ) )
示例#3
0
    def run_pure(self, allProgramData):
        """Execute the generated program on an embedded C4 instance.

        Loads the full program into a fresh C4 runtime (through the ctypes
        handle in self.lib), collects the contents of every table, tears
        the runtime down, and wipes the c4_home scratch directory.

        :param allProgramData: two-element sequence:
            [0] list of every code line in the generated C4 program,
            [1] list of all table names in the generated C4 program.
        :return: results array produced by self.saveC4Results_toArray.
        :raises ConfigParser.NoOptionError: if 'C4_HOME_PATH' is unset.
        :raises AssertionError: if the configured C4_HOME_PATH is missing.
        """

        allProgramLines = allProgramData[
            0]  # := list of every code line in the generated C4 program.
        tableList = allProgramData[
            1]  # := list of all tables in generated C4 program.

        # get full program
        fullprog = "".join(allProgramLines)

        # ----------------------------------------- #

        # initialize c4 instance
        self.lib.c4_initialize()
        self.c4_obj = self.lib.c4_make(None, 0)

        # ---------------------------------------- #
        # load program
        logging.debug("... loading prog ...")

        logging.debug("SUBMITTING SUBPROG : ")
        logging.debug(fullprog)
        # NOTE(review): bytes(fullprog) assumes a Python 2 str here; under
        # Python 3 this would need an explicit encoding -- confirm runtime.
        c_prog = bytes(fullprog)
        self.lib.c4_install_str(self.c4_obj, c_prog)

        # ---------------------------------------- #
        # dump program results to file
        logging.debug("... dumping program ...")

        results_array = self.saveC4Results_toArray(tableList)

        # ---------------------------------------- #
        # close c4 program
        logging.debug("... closing C4 ...")

        self.lib.c4_destroy(self.c4_obj)
        self.lib.c4_terminate()

        # wipe the c4_home scratch directory so stale state cannot leak
        # into subsequent runs.
        try:
            C4_HOME_PATH = tools.getConfig(self.argDict["settings"], "DEFAULT",
                                           "C4_HOME_PATH", str)
            try:
                # for safety:
                C4_HOME_PATH = C4_HOME_PATH.replace("/c4_home", "")
                C4_HOME_PATH = C4_HOME_PATH.replace("//", "")

                assert (os.path.isdir(C4_HOME_PATH) == True)
                os.system("rm -rf " + C4_HOME_PATH + "/c4_home/*")

            except AssertionError:
                raise AssertionError(C4_HOME_PATH + " does not exist.")

        except ConfigParser.NoOptionError as e:
            logging.info(
                "  FATAL ERROR : option 'C4_HOME_PATH' not set in settings file '"
                + self.argDict["settings"] + "'. aborting.")
            raise e

        return results_array
示例#4
0
def neg_rewrite(cursor, argDict, settings_path, ruleMeta, factMeta,
                parsedResults):
    ''' Performs the negative rewrite of the dedalus program.

        :param cursor: IR database cursor.
        :param argDict: dict of run arguments.
        :param settings_path: path to the settings file.
        :param ruleMeta: list of Rule meta objects.
        :param factMeta: list of Fact meta objects.
        :param parsedResults: evaluation results of the original program.
        :return: ( ruleMeta, factMeta ) with all negated rules rewritten.
    '''

    # use the aggregate rewrite from the dm module, avoids issues with
    # aggregate rules.

    # determine if we are negating clocks
    logging.debug("COMBO-REWRITE: Begin Combinatorial Rewrite...")
    NEGATE_CLOCKS = True
    try:
        NEGATE_CLOCKS = tools.getConfig(settings_path, "DEFAULT",
                                        "NEGATE_CLOCKS", bool)
    except ConfigParser.NoOptionError:
        # option name fixed: previous message misspelled it 'NEGAGTE_CLOCKS'
        logging.warning("WARNING : no 'NEGATE_CLOCKS' defined in 'DEFAULT' section of " + \
                        "settings file ... running with NEGATE_CLOCKS=True")

    ruleMeta = dm.aggRewrites(ruleMeta, argDict)

    # add in active domain facts, this should only be done once in reality.
    factMeta = domain.getActiveDomain(cursor, factMeta, parsedResults)
    setTypes.setTypes(cursor, argDict, ruleMeta)

    # iterate to a fixpoint: keep negating until no rules remain
    # that require negation.
    while True:
        rulesToNegate = findNegativeRules(cursor, ruleMeta)
        rulesToNegateList = rulesToNegate.keys()

        # if there are no rules to negate, exit
        if len(rulesToNegateList) == 0:
            break

        # Negate the rules in the list
        ruleMeta, factMeta = negateRules( cursor, \
                                          argDict, \
                                          settings_path, \
                                          ruleMeta, \
                                          factMeta, \
                                          rulesToNegate, \
                                          parsedResults, \
                                          neg_clocks=NEGATE_CLOCKS)

    logging.debug("COMBO-REWRITE: Ending Combinatorial Rewrite.")

    return ruleMeta, factMeta
示例#5
0
def injectCustomFaults(allProgramData):
    """Delete user-specified clock facts from the generated program.

    The custom fault is a list of clock fact strings (with quotes) to
    remove from the full clock relation. Lines surviving the filter are
    re-terminated with a single ';'.

    :param allProgramData: [ programLines, tableList ].
    :return: [ filteredProgramLines, tableList ] when a custom fault is
             configured, otherwise allProgramData unchanged.
    """

    # grab the custom fault, which is a list of clock fact strings, with
    # quotes, to remove from the full clock relation
    customFaultList = tools.getConfig("CORE", "CUSTOM_FAULT", list)

    # no custom fault configured -- leave the program untouched
    if not customFaultList:
        return allProgramData

    programLines = allProgramData[0]
    tableList = allProgramData[1]

    # strip semicolons before comparing against the fault list, keep
    # only the lines NOT named in the fault, and re-append one ';'
    faultyProgramLines = [
        bare + ";"
        for bare in (raw.replace(";", "") for raw in programLines)
        if bare not in customFaultList
    ]

    return [faultyProgramLines, tableList]
示例#6
0
def combo(factMeta, ruleMeta, cursor, argDict):
    ''' Run the combinatorial ('combo') negative-write rewrite pipeline.

        Iteratively applies the de Morgan's-based rewrite until no rule
        references a negated IDB subgoal, then cleans up unused variables
        and unused not_ rules.

        :param factMeta: list of Fact meta objects for the program.
        :param ruleMeta: list of Rule meta objects for the program.
        :param cursor: IR database cursor.
        :param argDict: dict of run arguments; must contain "settings".
        :return: ( factMeta, ruleMeta ) with all rewrites applied.
        :raises ValueError: if NW_DOM_DEF is missing or unrecognized.
    '''

    # ----------------------------------------- #

    logging.debug("  COMBO : running process...")

    settings_path = argDict["settings"]

    # ----------------------------------------- #
    # get parameters

    # ========== NW DOM DEF ========== #
    try:
        NW_DOM_DEF = tools.getConfig(settings_path, "DEFAULT", "NW_DOM_DEF",
                                     str)
        if NW_DOM_DEF == "sip":
            pass
        else:
            raise ValueError( "unrecognized NW_DOM_DEF option '" + NW_DOM_DEF + \
                              "' for combo NW rewrites. aborting..." )
    except ConfigParser.NoOptionError:
        raise ValueError( "no 'NW_DOM_DEF' defined in 'DEFAULT' section of " + settings_path + \
                          ". aborting..." )

    # ----------------------------------------- #
    # replace unused variables with wildcards

    if NW_DOM_DEF == "sip":
        ruleMeta = nw_tools.replace_unused_vars(ruleMeta, cursor)

    # ----------------------------------------- #
    # rewrite rules with fixed data
    # in the head

    ruleMeta, factMeta = nw_tools.fixed_data_head_rewrites(
        ruleMeta, factMeta, argDict)

    # ----------------------------------------- #
    # rewrite rules with aggregate functions
    # in the head

    ruleMeta = nw_tools.aggRewrites(ruleMeta, argDict)

    # ----------------------------------------- #
    # enforce a uniform goal attribute lists

    ruleMeta = nw_tools.setUniformAttList(ruleMeta, cursor)

    logging.debug("  COMBO : len( ruleMeta ) after setUniformAttList = " +
                  str(len(ruleMeta)))

    # ----------------------------------------- #
    # enforce unique existential attributes
    # per rule

    ruleMeta = nw_tools.setUniqueExistentialVars(ruleMeta)

    # ----------------------------------------- #
    # replace time att references

    ruleMeta = dm_time_att_replacement.dm_time_att_replacement(
        ruleMeta, cursor, argDict)

    # ----------------------------------------- #
    # append rids to all rel names and
    # generate cps of the original rules
    # (do not reference these in final programs)

    if NW_DOM_DEF == "sip":

        # future optimization : do this lazily:
        ruleMeta.extend(nw_tools.generate_orig_cps(ruleMeta))

    # ----------------------------------------- #
    # collect the not_ rule templates used by
    # the combo rewrite

    if NW_DOM_DEF == "sip":

        # future optimization : do this lazily:
        not_templates, ruleMeta = get_not_templates_combo(factMeta, ruleMeta)

    # ----------------------------------------- #
    # generate a map of all rids to corresponding
    # rule meta object pointers.

    if NW_DOM_DEF == "sip":
        rid_to_rule_meta_map = nw_tools.generate_rid_to_rule_meta_map(ruleMeta)

    # ----------------------------------------- #
    # build all de morgan's rules

    COUNTER = 0
    while nw_tools.stillContainsNegatedIDBs(ruleMeta, cursor):

        logging.debug("  COMBO : COUNTER = " + str(COUNTER))

        # runaway-loop guard : if the rewrite has not converged after
        # 3 passes, dump the rule set and abort hard.
        if COUNTER == 3:
            print("////////////")
            for r in ruleMeta:
                print(dumpers.reconstructRule(r.rid, r.cursor))
            sys.exit("wtf?")

        # ----------------------------------------- #
        # check if any rules include negated idb
        # subgoals

        targetRuleMetaSets = nw_tools.getRuleMetaSetsForRulesCorrespondingToNegatedSubgoals( ruleMeta, \
                                                                                             cursor )

        # ----------------------------------------- #
        # break execution if no rules contain negated IDBs.
        # should not hit this b/c of loop condition.
        # (was 'return []', which would have broken tuple unpacking
        # in callers expecting ( factMeta, ruleMeta ).)

        if len(targetRuleMetaSets) < 1:
            break

        # ----------------------------------------- #
        # create the de morgan rewrite rules.
        # incorporates domcomp and existential
        # domain subgoals.

        if NW_DOM_DEF == "sip":
            ruleMeta = do_combo_sip( factMeta, \
                                     ruleMeta, \
                                     targetRuleMetaSets, \
                                     not_templates, \
                                     rid_to_rule_meta_map, \
                                     cursor, \
                                     argDict )
        else:
            raise ValueError( "unrecognized NW_DOM_DEF option '" + NW_DOM_DEF + \
                              "'. aborting..." )

        # ----------------------------------------- #
        # update rid to rule meta map

        rid_to_rule_meta_map = nw_tools.generate_rid_to_rule_meta_map(ruleMeta)

        # increment loop counter
        COUNTER += 1

    # ----------------------------------------- #
    # replace unused variables with wildcards

    if NW_DOM_DEF == "sip":
        ruleMeta = nw_tools.replace_unused_vars(ruleMeta, cursor)

    # ----------------------------------------- #
    # filter out unused not_ rules

    if NW_DOM_DEF == "sip":
        ruleMeta = nw_tools.delete_unused_not_rules(ruleMeta, cursor)

    # NOTE: a leftover debug dump + sys.exit("blahasdf") previously sat
    # here and made the return below unreachable; removed.

    logging.debug("  COMBO : ...done.")
    return factMeta, ruleMeta
示例#7
0
'''

import inspect, os, sys

# ------------------------------------------------------ #
# import sibling packages HERE!!!
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import clockTools, tools, dumpers
# ------------------------------------------------------ #

#############
#  GLOBALS  #
#############
# module-level debug flags, read once at import time via tools.getConfig
# (section "DEDT"); presumably backed by the project config -- verify.
DEDALUSREWRITER_DEBUG = tools.getConfig("DEDT", "DEDALUSREWRITER_DEBUG", bool)
DEDALUSREWRITER_DUMPS_DEBUG = tools.getConfig("DEDT",
                                              "DEDALUSREWRITER_DUMPS_DEBUG",
                                              bool)

# canonical time attribute names and rewritten-marker value used
# throughout the dedalus rewriter
timeAtt_snd = "SndTime"
timeAtt_deliv = "DelivTime"
rewrittenFlag = "True"


############################
#  GET DEDUCTIVE RULE IDS  #
############################
def getDeductiveRuleIDs(cursor):
    # deductive rules are not next or async
    cursor.execute(
示例#8
0
'''

import inspect, os, sys

# ------------------------------------------------------ #
# import sibling packages HERE!!!
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import dumpers, parseCommandLineInput, tools
# ------------------------------------------------------ #

#############
#  GLOBALS  #
#############
# module-level config values, read once at import time (section "DEDT")
CLOCKRELATION_DEBUG = tools.getConfig("DEDT", "CLOCKRELATION_DEBUG", bool)
COMM_MODEL = tools.getConfig("DEDT", "COMM_MODEL", str)


#########################
#  INIT CLOCK RELATION  #
#########################
# input IR database cursor and cmdline input
# create initial clock relation
# output nothing
def initClockRelation(cursor, argDict):

    # check if node topology defined in Fact relation
    nodeFacts = cursor.execute(
        '''SELECT name FROM Fact WHERE Fact.name == "node"''')
示例#9
0
文件: deMorgans.py 项目: dotnwat/orik
import sympy

# ------------------------------------------------------ #
# import sibling packages HERE!!!
if not os.path.abspath( __file__ + "/../.." ) in sys.path :
  sys.path.append( os.path.abspath( __file__ + "/../.." ) )

from dedt  import Rule
from utils import tools, dumpers
# ------------------------------------------------------ #


#############
#  GLOBALS  #
#############
# module-level debug flag, read once at import time (section "DEDT")
NEGATIVEWRITES_DEBUG = tools.getConfig( "DEDT", "NEGATIVEWRITES_DEBUG", bool )

# binary arithmetic operators recognized by the rewrite
arithOps = [ "+", "-", "*", "/" ]


################
#  DO MORGANS  #
################
# generates a set of new rules by applying deMorgan's law on the input rule set
# and adds domain constraint subgoals where appropriate. 
def doDeMorgans( parentRID, ruleRIDs, cursor ) :

  print "==========================================="
  print "... running DO DEMORGANS from deMorgans ..."
  print "==========================================="
示例#10
0
import dumpers_c4

# ------------------------------------------------------ #
# import sibling packages HERE!!!
if not os.path.abspath( __file__ + "/../../.." ) in sys.path :
  sys.path.append( os.path.abspath( __file__ + "/../../.." ) )

from utils import tools
from dedt  import Rule
# ------------------------------------------------------ #


#############
#  GLOBALS  #
#############
# module-level debug flags, read once at import time (section "DEDT")
C4_TRANSLATOR_DEBUG   = tools.getConfig( "DEDT", "C4_TRANSLATOR_DEBUG", bool )
C4_TRANSLATOR_DEBUG_1 = tools.getConfig( "DEDT", "C4_TRANSLATOR_DEBUG1", bool )


#####################
#  EXISTING DEFINE  #
#####################
# input subgoal name and list of currently accumulated define statements
# determine if a define already exists for the relation indicated by the subgoal
# output boolean

def existingDefine( name, definesNames ) :

  if name in definesNames :
    return True
  else :
示例#11
0
文件: dedt.py 项目: KDahlgren/iapyx
def rewrite_to_datalog( argDict, factMeta, ruleMeta, cursor ) :
  ''' Run the full chain of program rewrites over the parsed dedalus data.

      Applies the dedalus rewrite, then (depending on settings and the
      "neg_writes" argument) wildcard, iedb, dm, and combo rewrites, and
      finally the provenance rewrite. Type information is refreshed via
      setTypes after each rewrite that adds rule definitions.

      :param argDict: dict of run arguments; must contain "settings",
                      "EOT", and "neg_writes".
      :param factMeta: list of Fact meta objects.
      :param ruleMeta: list of Rule meta objects.
      :param cursor: IR database cursor.
      :return: [ factMeta, ruleMeta ] after all rewrites.
      :raises Exception: if 'NW_DOM_DEF' is not defined in settings.
  '''

  logging.debug( "  REWRITE : running process..." )

  settings_path = argDict[ "settings" ]
  EOT           = argDict[ "EOT" ]

  # debug dump of per-rule goal attributes before any rewrites run
  for rule in ruleMeta :
    rid = rule.rid
    cursor.execute( "SELECT attID,attName FROM GoalAtt WHERE rid=='" + str( rid ) + "'" )
    goal_atts = cursor.fetchall()
    logging.debug( "  DEDT : len( goal_atts ) = " + str( len( goal_atts )) )
    goal_atts = tools.toAscii_multiList( goal_atts )
    logging.debug( "  DEDT : len( goal_atts ) = " + str( len( goal_atts )) )
    logging.debug( "  DEDT : goal_atts (0) = " + str( goal_atts ) )
    logging.debug( "  DEDT : r = " + dumpers.reconstructRule( rid, cursor ) )

  # ----------------------------------------------------------------------------- #
  # dedalus rewrite

  logging.debug( "  REWRITE : calling dedalus rewrites..." )

  allMeta  = dedalusRewriter.rewriteDedalus( argDict, factMeta, ruleMeta, cursor )
  factMeta = allMeta[0]
  ruleMeta = allMeta[1]

  # be sure to fill in all the type info for the new rule definitions
  #setTypes.setTypes( cursor, argDict, ruleMeta )

  # debug dump of per-rule data after the dedalus rewrite
  for rule in ruleMeta :
    rid = rule.rid
    cursor.execute( "SELECT attID,attName FROM GoalAtt WHERE rid=='" + str( rid ) + "'" )
    goal_atts = cursor.fetchall()
    goal_atts = tools.toAscii_multiList( goal_atts )
    #logging.debug( "  DEDT : goal_atts (1) = " + str( goal_atts ) )
    logging.debug( "  DEDT : rule.ruleData = " + str( rule.ruleData ) )

  # ----------------------------------------------------------------------------- #
  # other rewrites
  # first get parameters for which rewrites to run.

  # ========== WILDCARD ========== #
  try :
    rewriteWildcards = tools.getConfig( settings_path, "DEFAULT", "WILDCARD_REWRITES", bool )
  except ConfigParser.NoOptionError :
    logging.warning( "WARNING : no 'WILDCARD_REWRITES' defined in 'DEFAULT' section of " + settings_path + \
                     "...running without wildcard rewrites." )
    rewriteWildcards = False
    pass

  # ========== DM ========== #
  try :
    RUN_DM = tools.getConfig( settings_path, "DEFAULT", "DM", bool )
  except ConfigParser.NoOptionError :
    logging.warning( "WARNING : no 'DM' defined in 'DEFAULT' section of " + settings_path + \
                     "...running without dm rewrites" )
    RUN_DM = False
    pass

  # ========== NW_DOM_DEF ========== #
  try :
    NW_DOM_DEF = tools.getConfig( settings_path, "DEFAULT", "NW_DOM_DEF", str )
  except ConfigParser.NoOptionError :
    raise Exception( "no 'NW_DOM_DEF' defined in 'DEFAULT' section of " + settings_path + \
                     ". aborting..." )

  # ========== COMBO ========== #
  try:
    RUN_COMB = tools.getConfig( settings_path, "DEFAULT", "COMB", bool )
  except ConfigParser.NoOptionError :
    logging.info( "WARNING : no 'COMB' defined in 'DEFAULT' section of " + settings_path + \
                  "...running without combo rewrites" )
    RUN_COMB = False
    pass

  # ========== IEDB ========== #
  try :
    RUN_IEDB_REWRITES = tools.getConfig( settings_path, "DEFAULT", "IEDB_REWRITES", bool )
  except ConfigParser.NoOptionError :
    logging.info( "WARNING : no 'IEDB_REWRITES' defined in 'DEFAULT' section of " + settings_path + \
                  "...running without iedb rewrites" )
    RUN_IEDB_REWRITES = False
    pass

  # ----------------------------------------------------------------------------- #
  # do wildcard rewrites
  # always do wildcard rewrites in prep for negative writes.

  if rewriteWildcards or \
     ( NW_DOM_DEF == "sip"         and \
       ( argDict[ "neg_writes" ] == "dm" or \
         argDict[ "neg_writes" ] == "combo" ) ) :

    logging.debug( "  REWRITE : calling wildcard rewrites..." )

    ruleMeta = rewrite_wildcards.rewrite_wildcards( ruleMeta, cursor )

#    for rule in ruleMeta :
#      #logging.debug( "rule.ruleData = " + str( rule.ruleData ) )
#      logging.debug( "  REWRITE : (1) r = " + dumpers.reconstructRule( rule.rid, rule.cursor ) )
#    #sys.exit( "blah2" )
  
#    for rule in ruleMeta :
#      logging.debug( "  DEDT : rule.ruleData (2) = " + str( rule.ruleData ) )
  
    # be sure to fill in all the type info for the new rule definitions
    logging.debug( "  REWRITE : running setTypes after wildcard rewrites." )
    setTypes.setTypes( cursor, argDict, ruleMeta )
  
    update_goal_types( ruleMeta )
  else :
    setTypes.setTypes( cursor, argDict, ruleMeta )

  # ----------------------------------------------------------------------------- #
  # iedb rewrites 

  if RUN_IEDB_REWRITES                 or \
     ( NW_DOM_DEF == "sip"         and \
       ( argDict[ "neg_writes" ] == "dm" or \
         argDict[ "neg_writes" ] == "combo" ) ) :

    logging.debug( "  REWRITE : calling iedb rewrites..." )
    factMeta, ruleMeta = iedb_rewrites.iedb_rewrites( factMeta, \
                                                      ruleMeta, \
                                                      cursor, \
                                                      settings_path )

#    for rule in ruleMeta :
#      rid = rule.rid
#      cursor.execute( "SELECT attID,attName FROM GoalAtt WHERE rid=='" + str( rid ) + "'" )
#      goal_atts = cursor.fetchall()
#      goal_atts = tools.toAscii_multiList( goal_atts )
#      logging.debug( "  DEDT : goal_atts (3) = " + str( goal_atts ) )

    #for rule in ruleMeta :
    #  print c4_translator.get_c4_line( rule.ruleData, "rule" )
    #for fact in factMeta :
    #  print c4_translator.get_c4_line( fact.factData, "fact" )
    #sys.exit( "asdf" )

    # be sure to fill in all the type info for the new rule definitions
    logging.debug( "  REWRITE : running setTypes after iedb rewrites." )
    setTypes.setTypes( cursor, argDict, ruleMeta )

  # ----------------------------------------------------------------------------- #
  # do dm rewrites

  if argDict[ "neg_writes" ] == "dm" :

    logging.debug( "  REWRITE : calling dm rewrites..." )
    factMeta, ruleMeta = dm.dm( factMeta, ruleMeta, cursor, argDict ) # returns new ruleMeta

    logging.debug( "  REWRITE : final dm program lines:" )
    for rule in ruleMeta :
      logging.debug( dumpers.reconstructRule( rule.rid, rule.cursor ) )
    #sys.exit( "blah" )

    # be sure to fill in all the type info for the new rule definitions
    logging.debug( "  REWRITE : running setTypes after dm rewrites." )
    setTypes.setTypes( cursor, argDict, ruleMeta )

  # ----------------------------------------------------------------------------- #
  # do combo rewrites
  # NOTE(review): "comb" and "combo" are distinct modes routed to different
  # modules (combitorialNegRewriter vs combo) -- confirm this is intended.

  if argDict[ "neg_writes" ] == "comb" :

    # collect the results from the original program
    original_prog = c4_translator.c4datalog( argDict, cursor )
    results_array = c4_evaluator.runC4_wrapper( original_prog, argDict )
    parsedResults = tools.getEvalResults_dict_c4( results_array )

    # run the neg rewrite for combinatorial approach
    # returns a new ruleMeta
    logging.debug( "  REWRITE : calling combo rewrites..." )
    ruleMeta, factMeta = combitorialNegRewriter.neg_rewrite( cursor, \
                                                             argDict, \
                                                             settings_path, 
                                                             ruleMeta, \
                                                             factMeta, \
                                                             parsedResults ) 

  if argDict[ "neg_writes" ] == "combo" :

    logging.debug( "  REWRITE : calling combo rewrites..." )
    factMeta, ruleMeta = combo.combo( factMeta, ruleMeta, cursor, argDict )

#  for rule in ruleMeta :
#    rid = rule.rid
#    cursor.execute( "SELECT attID,attName FROM GoalAtt WHERE rid=='" + str( rid ) + "'" )
#    goal_atts = cursor.fetchall()
#    goal_atts = tools.toAscii_multiList( goal_atts )
#    logging.debug( "  DEDT : goal_atts (2) = " + str( goal_atts ) )

  # ----------------------------------------------------------------------------- #
  # provenance rewrites

  # add the provenance rules to the existing rule set
  logging.debug( "  REWRITE : calling provenance rewrites..." )
  ruleMeta.extend( provenanceRewriter.rewriteProvenance( ruleMeta, cursor, argDict ) )

  # be sure to fill in all the type info for the new rule definitions
  logging.debug( "  REWRITE : running setTypes after provenance rewrites." )
  setTypes.setTypes( cursor, argDict, ruleMeta )

  # ----------------------------------------------------------------------------- #

  logging.debug( "  REWRITE : ...done." )

  return [ factMeta, ruleMeta ]
示例#12
0
'''

import inspect, os, sys

# ------------------------------------------------------ #
# import sibling packages HERE!!!
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import tools, dumpers
# ------------------------------------------------------ #

#############
#  GLOBALS  #
#############
# module-level debug flag, read once at import time (section "DEDT")
DUMPERS_C4_DEBUG = tools.getConfig("DEDT", "DUMPERS_C4_DEBUG", bool)


#############
#  DUMP IR  #
#############
# dump the contents of an entire IR database
def dumpIR(cursor, db_dump_save_path):

    # get facts
    cursor.execute("SELECT fid FROM Fact")
    fid_all = cursor.fetchall()
    fid_all = tools.toAscii_list(fid_all)

    full_facts = []
    for fid in fid_all:
示例#13
0
文件: Fact.py 项目: dotnwat/orik
Fact.py
   Defines the Fact class.
   Establishes all relevant attributes and get/set methods.
'''

import inspect, os, sqlite3, sys

# ------------------------------------------------------ #
# import sibling packages HERE!!!
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import tools
# ------------------------------------------------------ #

# module-level debug flag for the Fact class, read once at import time
DEBUG = tools.getConfig("DEDT", "FACT_DEBUG", bool)


class Fact:
    # attributes
    # NOTE: class-level defaults, shadowed per instance in __init__
    fid = ""
    cursor = None

    # constructor
    def __init__(self, fid, cursor):
        # fid: unique id keying this fact's rows in the IR database
        self.fid = fid
        # cursor: shared IR database cursor used by the get/set methods
        self.cursor = cursor

    # ------------------------------------- #
    #                GET                    #
示例#14
0
def regProv(regRule, nameAppend, cursor, argDict):
    ''' Generate the provenance version of a regular rule.

        Builds a new Rule whose relation name is the original name plus
        nameAppend and whose goal attribute list is extended with every
        subgoal attribute not already in the head (skipping wildcards,
        integer literals, and fixed strings).

        :param regRule: Rule meta object to derive the provenance rule from.
        :param nameAppend: string suffix for the provenance relation name.
        :param cursor: IR database cursor.
        :param argDict: dict of run arguments; must contain "settings".
        :return: the new provenance Rule object.
    '''

    # molly changes the required goal-attribute ordering (see sorting below)
    try:
        USING_MOLLY = tools.getConfig(argDict["settings"], "DEFAULT",
                                      "USING_MOLLY", bool)
    except ConfigParser.NoOptionError:
        logging.warning(
            "WARNING : no 'USING_MOLLY' defined in 'DEFAULT' section of settings.ini ...assume running without molly."
        )
        USING_MOLLY = False

    logging.debug("  REG PROV : running regProv...")
    logging.debug("  REG PROV : regRule              = " + str(regRule))
    logging.debug("  REG PROV : regRule.relationName = " +
                  regRule.relationName)

    # ------------------------------------------------------ #
    # generate a random ID for the new provenance rule

    rid = tools.getIDFromCounters("rid")

    # ------------------------------------------------------ #
    # initialize the prov rule to old version of
    # meta rule
    # (deep copy so edits below never mutate the original rule's data)

    new_provmeta_ruleData = copy.deepcopy(regRule.ruleData)

    # ------------------------------------------------------ #
    # the provenance rule name ends with "_prov" appended
    # with a unique number

    new_provmeta_ruleData[
        "relationName"] = new_provmeta_ruleData["relationName"] + nameAppend

    # ------------------------------------------------------ #
    # the goal att list consists of all subgoal atts

    provGoalAttList = []

    # grab all goal atts
    goalAttList = new_provmeta_ruleData["goalAttList"]

    # save to provenance rule goal attribute list
    provGoalAttList.extend(goalAttList)

    # extract and save the time argument as the last element in the attribute list
    #  provGoalAttList_last = provGoalAttList[-1]
    #  provGoalAttList      = provGoalAttList[:-1]

    # ------------------------------------------------------------------ #
    # grab all subgoal atts

    subgoalListOfDicts = new_provmeta_ruleData["subgoalListOfDicts"]
    for subgoal in subgoalListOfDicts:
        subgoalAttList = subgoal["subgoalAttList"]
        for att in subgoalAttList:

            logging.debug("  REG PROV : att in subgoalAttList = " + att)

            # don't duplicate atts in the prov head
            if not att in provGoalAttList:

                logging.debug("  REG PROV : att not in " +
                              str(provGoalAttList))

                # do not add wildcards and fixed integer inputs
                if not att == "_" and not att.isdigit():

                    logging.debug("  REG PROV : att not '_' and not isdigit")

                    # do not add fixed string inputs
                    # do not add unused variables (huh? why? messes up not_rule arities)
                    #if not isFixedString( att ) and not isUnused( subgoalListOfDicts, new_provmeta_ruleData[ "eqnDict" ], att ) :
                    if not isFixedString(att):
                        provGoalAttList.append(att)

    # ------------------------------------------------------------------ #
    # add the time argument last

# if not provGoalAttList_last in provGoalAttList :
#    provGoalAttList.append( provGoalAttList_last )

# ------------------------------------------------------------------ #

    logging.debug("  REG PROV : new_provmeta_ruleData['relationName'] = " +
                  new_provmeta_ruleData["relationName"])
    logging.debug("  REG PROV : provGoalAttList                       = " +
                  str(provGoalAttList))

    # sort goal atts to ensure NRESERVED, NRESERVED+1, and MRESERVED are rightmost
    if USING_MOLLY:
        provGoalAttList = sortGoalAttList(provGoalAttList)

    logging.debug("  REG PROV : provGoalAttList (after sorting)       = " +
                  str(provGoalAttList))

    # save to rule data
    new_provmeta_ruleData["goalAttList"] = provGoalAttList

    # ------------------------------------------------------ #
    # preserve adjustments by instantiating the new meta rule
    # as a Rule

    provRule = Rule.Rule(rid, new_provmeta_ruleData, cursor)
    provRule.orig_rule_ptr = regRule
    provRule.rule_type = regRule.rule_type

    logging.debug("  REG PROV : regRule                  = " + str(regRule))
    logging.debug("  REG PROV : regRule.orig_goalAttList = " +
                  str(regRule.orig_goalAttList))
    logging.debug("  REG PROV : provRule.orig_rule_ptr   = " +
                  str(provRule.orig_rule_ptr))
    logging.debug("  REG PROV : provRule.orig_rule_ptr.orig_goalAttList = " +
                  str(provRule.orig_rule_ptr.orig_goalAttList))
    logging.debug("  REG PROV : returning prov rule id " + str(rid) +
                  " provRule.ruleData = " + str(provRule.ruleData))
    logging.debug("  REG PROV : provRule.relationName       = " +
                  provRule.relationName)
    logging.debug("  REG PROV : provRule.goalAttList        = " +
                  str(provRule.goalAttList))
    logging.debug("  REG PROV : provRule.goalTimeArg        = " +
                  provRule.goalTimeArg)
    logging.debug("  REG PROV : provRule.subgoalListOfDicts = " +
                  str(provRule.subgoalListOfDicts))
    logging.debug("  REG PROV : provRule.eqnDict            = " +
                  str(provRule.eqnDict))
    logging.debug("  REG PROV : provRule.orig_rule_ptr      = " +
                  str(provRule.orig_rule_ptr))
    logging.debug("  REG PROV : provRule.orig_rule_ptr.goalAttList = " +
                  str(provRule.orig_rule_ptr.goalAttList))

    #if provRule.relationName == "not_missing_log_prov8" :
    #  sys.exit( "blah" )

    # ------------------------------------------------------ #
    # replace original time goal atts

    #  if tools.getConfig( argDict[ "settings" ], "DEFAULT", "DM", bool ) :
    #    provRule = replaceTimeAtts( provRule )
    #
    #  logging.debug( "  REG PROV : returning prov rule id " + str( rid ) + " provRule.ruleData = " + str( provRule.ruleData ) )
    #  logging.debug( "  REG PROV : provRule.relationName       = " + provRule.relationName )
    #  logging.debug( "  REG PROV : provRule.goalAttList        = " + str( provRule.goalAttList ) )
    #  logging.debug( "  REG PROV : provRule.goalTimeArg        = " + provRule.goalTimeArg )
    #  logging.debug( "  REG PROV : provRule.subgoalListOfDicts = " + str( provRule.subgoalListOfDicts ) )
    #  logging.debug( "  REG PROV : provRule.eqnDict            = " + str( provRule.eqnDict ) )

    return provRule
示例#15
0
def rewriteInductive( argDict, metarule, cursor ) :
  """Rewrite one inductive ('next') dedalus meta rule into pure datalog.

  Appends the appropriate time attribute to the goal and all subgoal
  attribute lists, erases the goal time argument, adds a clock (or
  next_clock) subgoal, and re-instantiates the rule under the original
  rule id.

  :param argDict:  dict of run parameters; reads argDict["settings"]
                   for the NEXT_RULE_HANDLING configuration value
  :param metarule: the meta rule object to rewrite (must expose .rid
                   and .ruleData)
  :param cursor:   database cursor handed to the new Rule instance
  :return:         the new Rule instance with rule_type == "inductive"
  """

  logging.debug( "  REWRITE INDUCTIVE : running process..." )
  logging.debug( "  REWRITE INDUCTIVE : metarule.ruleData = " + str( metarule.ruleData ) )

  # ------------------------------------------------------ #
  # grab the next rule handling method

  try :
    NEXT_RULE_HANDLING = tools.getConfig( argDict[ "settings" ], \
                                          "DEFAULT", \
                                          "NEXT_RULE_HANDLING", \
                                          str )

  except ConfigParser.NoOptionError :
    logging.info( "WARNING : no 'NEXT_RULE_HANDLING' defined " + \
                  "in 'DEFAULT' section of settings file." )
    tools.bp( __name__, inspect.stack()[0][3], "FATAL ERROR : NEXT_RULE_HANDLING parameter not " + \
      "specified in DEFAULT section of settings file. use 'USE_AGGS', 'SYNC_ASSUMPTION', or " + \
      "'USE_NEXT_CLOCK' only." )

  # sanity check next rule handling value
  if NEXT_RULE_HANDLING == "USE_AGGS"          or \
     NEXT_RULE_HANDLING == "SYNC_ASSUMPTION"   or \
     NEXT_RULE_HANDLING == "USE_NEXT_CLOCK" :
    pass
  else :
    tools.bp( __name__, inspect.stack()[0][3], "FATAL ERROR : " + \
       "unrecognized NEXT_RULE_HANDLING value '" + NEXT_RULE_HANDLING + \
       "'. use 'USE_AGGS', 'SYNC_ASSUMPTION', or 'USE_NEXT_CLOCK' only." )

  # ------------------------------------------------------ #
  # dedalus rewrites overwrite the original rules
  # so, grab the original rid

  rid = metarule.rid

  # ------------------------------------------------------ #
  # initialize new version of meta rule to old version of
  # meta rule

  # NOTE(review): this is an alias, not a copy — the mutations below also
  # modify metarule.ruleData in place. Appears intentional, given the
  # "dedalus rewrites overwrite the original rules" comment above, but
  # confirm before relying on metarule.ruleData afterwards.
  new_metarule_ruleData = metarule.ruleData

  # ------------------------------------------------------ #
  # add SndTime+1/DelivTime to goal attribute list

  # timeAtt_snd / timeAtt_deliv are module-level constants (defined
  # outside this view).
  if NEXT_RULE_HANDLING == "USE_AGGS" :
    new_metarule_ruleData[ "goalAttList"].append( timeAtt_snd+"+1" )
  elif NEXT_RULE_HANDLING == "SYNC_ASSUMPTION" :
    new_metarule_ruleData[ "goalAttList"].append( timeAtt_deliv )
    #new_metarule_ruleData[ "eqnDict" ][ "MRESERVED==NRESERVED+1" ] = { "variableList" : [ "MRESRVED", "NRESERVED" ] }
  elif NEXT_RULE_HANDLING == "USE_NEXT_CLOCK" :
    new_metarule_ruleData[ "goalAttList"].append( timeAtt_deliv )

  # ------------------------------------------------------ #
  # remove goal time arg

  new_metarule_ruleData[ "goalTimeArg"] = ""

  # ------------------------------------------------------ #
  # add SndTime (or given numeric time argument) 
  # to all subgoal attribute lists

  for subgoal in new_metarule_ruleData[ "subgoalListOfDicts" ] :

    # ------------------------------------------------------ #
    # CASE : subgoal time argument in an integer
    if subgoal[ "subgoalTimeArg" ].isdigit() :
      subgoal[ "subgoalAttList" ].append( subgoal[ "subgoalTimeArg" ] )
      subgoal[ "subgoalTimeArg" ] = "" # erase time arg after assignment

    # ------------------------------------------------------ #
    # CASE : subgoal has no time argument
    else :
      subgoal[ "subgoalAttList" ].append( timeAtt_snd )

  # ------------------------------------------------------ #
  # add clock subgoal

  # grab the first attribute in a subgoal
  # observe the parser ensures the first attributes 
  # in all inductive rule subgoals

  firstAtt = new_metarule_ruleData[ "subgoalListOfDicts" ][0][ "subgoalAttList" ][0]

  # build the new clock subgoal dict
  # format :
  #   { subgoalName : 'subgoalNameStr', 
  #     subgoalAttList : [ data1, ... , dataN ], 
  #     polarity : 'notin' OR '', 
  #     subgoalTimeArg : <anInteger> }

  if NEXT_RULE_HANDLING == "USE_AGGS" or NEXT_RULE_HANDLING == "SYNC_ASSUMPTION" :

    if NEXT_RULE_HANDLING == "USE_AGGS" :
      clock_subgoalAttList = [ firstAtt, "_", timeAtt_snd, "_" ]

    elif NEXT_RULE_HANDLING == "SYNC_ASSUMPTION" :
      clock_subgoalAttList = [ firstAtt, "_", timeAtt_snd, "MRESERVED" ] # only works for synchronous model.

    clock_subgoalName    = "clock"
    clock_polarity       = "" # clocks are positive until proven negative.
    clock_subgoalTimeArg = ""

  elif NEXT_RULE_HANDLING == "USE_NEXT_CLOCK" :

    clock_subgoalAttList = [ firstAtt, "_", timeAtt_snd, "MRESERVED" ]
    clock_subgoalName    = "next_clock"
    clock_polarity       = "" # clocks are positive until proven negative.
    clock_subgoalTimeArg = ""

  clock_subgoalDict                      = {}
  clock_subgoalDict[ "subgoalName" ]     = clock_subgoalName
  clock_subgoalDict[ "subgoalAttList" ]  = clock_subgoalAttList
  clock_subgoalDict[ "polarity" ] = clock_polarity
  clock_subgoalDict[ "subgoalTimeArg" ]  = clock_subgoalTimeArg

  # ------------------------------------------------------ #
  # add the clock subgoal to the subgoal list for this rule

  new_metarule_ruleData[ "subgoalListOfDicts" ].append( clock_subgoalDict )

  # ------------------------------------------------------ #
  # preserve adjustments by instantiating the new meta rule
  # as a Rule

  new_metarule = Rule.Rule( rid, new_metarule_ruleData, cursor )

  # ------------------------------------------------------ #
  # populate rule type

  new_metarule.rule_type = "inductive"


  logging.debug( "  REWRITE INDUCTIVE : returning new meta rule with rule data = " + str( new_metarule.ruleData ) )
  return new_metarule
示例#16
0
    def __init__(self, argDict=None, orik_rgg=None):
        """Build a z3-backed boolean formula representation of an orik graph.

        :param argDict:  dict of run parameters; must contain "settings"
                         (path to settings file) when orik_rgg is given.
                         Defaults to an empty dict.
        :param orik_rgg: the orik result graph to translate into boolean
                         formulas. When both parameters are left at their
                         defaults, construction stops after recording the
                         solver type (lightweight instantiation).
        """
        # FIX: avoid the shared-mutable-default pitfall. The original used
        # `argDict={}`, so every default-constructed instance aliased the
        # same dict object. None is the sentinel for "no argDict given";
        # behavior for callers is unchanged.
        if argDict is None:
            argDict = {}

        self.argDict = argDict
        self.solver_type = "z3"

        # lightweight instantiation: nothing to translate.
        if argDict == {} and orik_rgg is None:
            return

        # --------------------------------------------------------------- #
        # get configuration params

        # ========= POS_FACTS_ONLY ========== #
        try:
            self.POS_FACTS_ONLY = tools.getConfig( self.argDict[ "settings" ], \
                                                   "DEFAULT", \
                                                   "POS_FACTS_ONLY", \
                                                   bool )
        except ConfigParser.NoOptionError:
            # default to True when the option is missing.
            self.POS_FACTS_ONLY = True
            logging.warning( "WARNING : no 'POS_FACTS_ONLY' defined in 'DEFAULT' section of " + \
                           self.argDict[ "settings" ] + "...running with POS_FACTS_ONLY==" + \
                           str( self.POS_FACTS_ONLY ) )

        # ========= USE_INTERMEDIATE_SIMPLIFICATIONS ========== #
        try:
            self.USE_INTERMEDIATE_SIMPLIFICATIONS = tools.getConfig( self.argDict[ "settings" ], \
                                                              "DEFAULT", \
                                                              "USE_INTERMEDIATE_SIMPLIFICATIONS", \
                                                              bool )
        except ConfigParser.NoOptionError:
            # default to False when the option is missing.
            self.USE_INTERMEDIATE_SIMPLIFICATIONS = False
            logging.warning( "WARNING : no 'USE_INTERMEDIATE_SIMPLIFICATIONS' " + \
                             "defined in 'DEFAULT' section of "+ \
                           self.argDict[ "settings" ] + "...running with " + \
                           "USE_INTERMEDIATE_SIMPLIFICATIONS ==" + \
                           str( self.USE_INTERMEDIATE_SIMPLIFICATIONS ) )

        # ========= CLOCKS_ONLY ========== #
        try:
            self.CLOCKS_ONLY = tools.getConfig( self.argDict[ "settings" ], \
                                                "DEFAULT", \
                                                "CLOCKS_ONLY", \
                                                bool )
        except ConfigParser.NoOptionError:
            # default to False when the option is missing.
            self.CLOCKS_ONLY = False
            logging.warning( "WARNING : no 'CLOCKS_ONLY' defined in 'DEFAULT' section of " + \
                             self.argDict[ "settings" ] + "...running with CLOCKS_ONLY==False." )

        logging.debug("  ORIK RGG TO BOOLEAN FMLA : using CLOCKS_ONLY = " +
                      str(self.CLOCKS_ONLY))

        # --------------------------------------------------------------- #
        # translate the graph, then shrink the formulas by mapping
        # literals to compact integer ids.

        self.boolean_fmla_list_orig = self.orik_rgg_to_fmla_list(orik_rgg)
        literal_to_id_map, self.id_to_literal_map = self.get_literal_maps()
        self.boolean_fmla_list = self.get_smaller_fmlas(literal_to_id_map)

        logging.debug( "  Z3 SOLVER __INIT__ : self.boolean_fmla_list_orig = " + \
                       str( self.boolean_fmla_list_orig ) )
        logging.debug( "  Z3 SOLVER __INIT__ : literal_to_id_map = " + \
                       str( literal_to_id_map ) )
        logging.debug( "  Z3 SOLVER __INIT__ : self.id_to_literal_map = " + \
                       str( self.id_to_literal_map ) )
        logging.debug( "  Z3 SOLVER __INIT__ : self.boolean_fmla_list = " + \
                       str( self.boolean_fmla_list ) )

        # --------------------------------------------------------------- #
        # other persistent data

        self.s = z3.Solver()
        self.fmla_id = 0
        self.prev_fmla_id = self.fmla_id
        self.set_symbols_statement = None
        self.constraint_statement_list = []
        self.previously_found_solns = []
示例#17
0
# standard python packages
import inspect, os, sys

import DerivTree, provTools, GoalNode, FactNode

if not os.path.abspath(__file__ + "/..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/.."))
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import tools
from Node import Node

# **************************************** #

DEBUG = tools.getConfig("DERIVATION", "RULENODE_DEBUG", bool)


class RuleNode(Node):

    # --------------------------------- #
    #####################
    #  SPECIAL ATTRIBS  #
    #####################
    descendants = []
    prid = None
    provAttMap = None
    triggerRecord = None

    # --------------------------------- #
示例#18
0
def initClockRelation( cursor, argDict ) :
  """Populate the Clock relation (and NextClock, when configured) for the
  node topology given on the command line.

  :param cursor:  database cursor for the IR database
  :param argDict: dict of run parameters; reads "settings", "EOT", "nodes"
  :raises SystemExit: when no node topology is specified
  """

  # communication model, e.g. "SYNC" or "ASYNC"
  COMM_MODEL = tools.getConfig( argDict[ "settings" ], "DEDT", "COMM_MODEL", str )

  # ------------------------------------------------------ #
  # grab the next rule handling method

  try :
    NEXT_RULE_HANDLING = tools.getConfig( argDict[ "settings" ], "DEFAULT", "NEXT_RULE_HANDLING", str )

  except ConfigParser.NoOptionError :
    logging.info( "WARNING : no 'NEXT_RULE_HANDLING' defined in 'DEFAULT' section of settings file." )
    tools.bp( __name__, inspect.stack()[0][3], "FATAL ERROR : NEXT_RULE_HANDLING parameter not specified in DEFAULT section of settings file. use 'USE_AGGS', 'SYNC_ASSUMPTION', or 'USE_NEXT_CLOCK' only." )

  # sanity check next rule handling value
  if NEXT_RULE_HANDLING not in ( "USE_AGGS", "SYNC_ASSUMPTION", "USE_NEXT_CLOCK" ) :
    tools.bp( __name__, inspect.stack()[0][3], "FATAL ERROR : unrecognized NEXT_RULE_HANDLING value '" + NEXT_RULE_HANDLING + "'. use 'USE_AGGS', 'SYNC_ASSUMPTION', or 'USE_NEXT_CLOCK' only." )

  # --------------------------------------------------------------------- #

  # check if node topology defined in Fact relation
  # (result is currently unused; the query is kept for parity with
  #  previous behavior)
  nodeFacts = cursor.execute('''SELECT name FROM Fact WHERE Fact.name == "node"''')

  defaultStartSendTime  = '1'
  maxSendTime           = argDict[ "EOT" ]

  # parameterized inserts -- values are bound with '?' placeholders
  # instead of building SQL by string concatenation.
  CLOCK_INSERT      = "INSERT OR IGNORE INTO Clock VALUES (?,?,?,?,'True')"
  NEXT_CLOCK_INSERT = "INSERT OR IGNORE INTO NextClock VALUES (?,?,?,?,'True')"

  # --------------------------------------------------------------------- #
  # prefer cmdline topology
  if argDict[ "nodes" ] :

    logging.debug( "Using node topology from command line: " + str(argDict[ "nodes" ]) )

    nodeSet = argDict[ "nodes" ]

    # synchronous communication model : delivery occurs exactly one
    # timestep after the send.
    if COMM_MODEL == "SYNC" :
      for i in range( int(defaultStartSendTime), int(maxSendTime)+1 ) :
        sndTime   = str( i )
        delivTime = str( i + 1 )  # hoisted: invariant over the node loops
        for n1 in nodeSet :
          for n2 in nodeSet :
            logging.debug( "INSERT OR IGNORE INTO Clock VALUES ('" + n1 + "','" + n2 + "','" + sndTime + "','" + delivTime + "', 'True')" )
            cursor.execute( CLOCK_INSERT, ( n1, n2, sndTime, delivTime ) )

            # handle using next_clock relation
            if NEXT_RULE_HANDLING == "USE_NEXT_CLOCK" :
              logging.debug( "INSERT OR IGNORE INTO NextClock VALUES ('" + n1 + "','" + n2 + "','" + sndTime + "','" + delivTime + "', 'True')" )
              cursor.execute( NEXT_CLOCK_INSERT, ( n1, n2, sndTime, delivTime ) )

    # asynchronous communication model : delivery occurs at any time from
    # the send time through EOT+1.
    elif COMM_MODEL == "ASYNC" :
      for i in range( int(defaultStartSendTime), int(maxSendTime)+1 ) :
        for j in range( i, int(maxSendTime)+2 ) :
          for n1 in nodeSet :
            for n2 in nodeSet :
              cursor.execute( CLOCK_INSERT, ( n1, n2, str( i ), str( j ) ) )

              # handle using next_clock relation
              # BUG FIX : the original inserted into Clock again here, a
              # no-op duplicate; USE_NEXT_CLOCK needs the NextClock
              # relation populated, matching the SYNC branch above.
              if NEXT_RULE_HANDLING == "USE_NEXT_CLOCK" and j == i + 1 :
                cursor.execute( NEXT_CLOCK_INSERT, ( n1, n2, str( i ), str( j ) ) )

    else :
      tools.bp( __name__, inspect.stack()[0][3], "FATAL ERROR : in settings.ini : COMM_MODEL '" + str(COMM_MODEL) + "' not recognized. Aborting." )

  else :
    sys.exit( "ERROR: No node topology specified! Aborting..." )
示例#19
0
文件: DerivTree.py 项目: dotnwat/orik
import pydot

# ------------------------------------------------------ #
import GoalNode, RuleNode, FactNode, provTools

# ------------------------------------------------------ #

if not os.path.abspath( __file__ + "/../.." ) in sys.path :
  sys.path.append( os.path.abspath( __file__ + "/../.." ) )

from utils import tools, dumpers

# **************************************** #


DEBUG = tools.getConfig( "DERIVATION", "DERIVTREE_DEBUG", bool )

# --------------------------------------------------- #
#                   DERIV TREE CLASS                  #
# --------------------------------------------------- #
class DerivTree( ) :

  #############
  #  ATTRIBS  #
  #############
  name           = None # name of relation identifier
  rid            = None # rule id, if applicable
  treeType       = None # goal, rule, or fact
  isNeg          = None # is goal negative?
  root           = None # GoalNode, RuleNode, FactNode
  programResults = None # complete dictionary of parsed results from table dump
示例#20
0
import inspect, os, sys

# ------------------------------------------------------ #
# import sibling packages HERE!!!
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import tools
from wrappers import C4Wrapper

# **************************************** #

C4_EXEC_PATH = os.path.dirname(
    os.path.abspath(__file__)) + "/../../lib/c4/build/src/c4i/c4i"

DEBUG = tools.getConfig("EVALUATORS", "C4_EVALUATOR_DEBUG", bool)


#####################
#  CLEAN TABLE STR  #
#####################
def cleanTableStr(tableStr):
    """Return *tableStr* with duplicate comma-separated entries removed.

    First occurrences are kept in their original order; remaining entries
    are re-joined with commas.
    """
    deduped = []
    for entry in tableStr.split(","):
        # keep only the first occurrence of each entry
        if entry not in deduped:
            deduped.append(entry)
    return ",".join(deduped)
示例#21
0
def get_dom_rules( orig_name, \
                   not_name, \
                   orig_rid, \
                   parent_rid, \
                   rid_to_rule_meta_map, \
                   ruleMeta, \
                   cursor, \
                   argDict ) :
    """Build the domain rules (unidom_ and exidom_) for a DM rewrite target.

    Produces one universal domain rule (unidom_<not_name>) constraining the
    complement of the targeted rule to tuples relevant to the parent rule,
    plus one existential domain rule (exidom_<not_name>_f<rid>) per
    orig_-rule defining the target relation that has existential variables.

    :param orig_name:            relation name of the rewrite target
    :param not_name:             name of the negated ("not_") version
    :param orig_rid:             rule id of the original target rule
    :param parent_rid:           rule id of the parent (negating) rule
    :param rid_to_rule_meta_map: map of rule id -> rule meta object
    :param ruleMeta:             list of all rule meta objects
    :param cursor:               database cursor for rule instantiation
    :param argDict:              dict of run parameters; reads "settings"
                                 and (when POST_EOT_FILTER) "EOT"
    :return:                     list of newly created Rule objects
    """

    newRules = []

    # ----------------------------------------- #
    # get parameters

    settings_path = argDict["settings"]

    # ========== POST EOT FILTER ========== #
    try:
        POST_EOT_FILTER = tools.getConfig(settings_path, "DEFAULT",
                                          "POST_EOT_FILTER", bool)
    except ConfigParser.NoOptionError:
        POST_EOT_FILTER = False
        logging.warning( "WARNING : no 'POST_EOT_FILTER' defined in 'DEFAULT' section of " + \
                         "settings.ini ...running with POST_EOT_FILTER=False." )

    # ----------------------------------------- #
    # ----------------------------------------- #

    logging.debug("=====================================================")
    logging.debug("  GET DOM RULES : orig_name   = " + orig_name)
    logging.debug("  GET DOM RULES : not_name    = " + not_name)
    logging.debug("  GET DOM RULES : orig_rid    = " + str(orig_rid))
    logging.debug("  GET DOM RULES : parent_rid  = " + str(parent_rid))
    logging.debug("  GET DOM RULES : parent rule : ")
    logging.debug("     " + dumpers.reconstructRule(parent_rid, cursor))

    # ------------------------------------------ #
    # gather subgoal atts

    negated_subgoal_atts = get_negated_subgoal_atts(orig_name, parent_rid,
                                                    ruleMeta)
    logging.debug("  GET DOM RULES : negated_subgoal_atts = " +
                  str(negated_subgoal_atts))

    # ------------------------------------------ #
    # map parent goal indexes to atts for
    # eval data extraction

    parent_goal_att_to_index_map, parent_goal_att_list = get_goal_att_to_index_map(
        parent_rid, ruleMeta)
    logging.debug("  GET DOM RULES : parent_goal_att_to_index_map = " +
                  str(parent_goal_att_to_index_map))

    # ------------------------------------------ #
    # generate sip domain idbs
    # [ { subgoalName : 'subgoalNameStr',
    #     subgoalAttList : [ data1, ... , dataN ],
    #     polarity : 'notin' OR '',
    #     subgoalTimeArg : <anInteger> }, ... ]

    # ------------------------------------------ #
    # build the universal domain rule

    uni_ruleData = {}

    # get relation name
    uni_ruleData["relationName"] = "unidom_" + not_name

    # check if a rule already exists
    # to prevent duplicates.
    if idb_already_exists(uni_ruleData["relationName"], cursor):
        return newRules

    #get goal atts
    uni_ruleData[ "goalAttList" ] = [ "A" + str(i) \
                                  for i in \
                                  range( 0, len( negated_subgoal_atts[0] ) ) ] # just need one for arity

    # map domain atts to negated subgoal atts
    # eg. [ [ X, Y ], [ Y, Q ] ]
    #  => dom_thing( A0, A1 ) <- ...
    #  => { A0: [ X, Y ], A1: [ Y, Q ] }
    # initialize maps to empty lists.
    uni_dom_atts_to_par_atts_map = { "A" + str( i ) : [] \
                                     for i in range( 0, \
                                     len( uni_ruleData[ "goalAttList" ] ) ) }
    for neg_sub_atts in negated_subgoal_atts:
        for i in range(0, len(neg_sub_atts)):
            sub_att = neg_sub_atts[i]
            uni_dom_atts_to_par_atts_map["A" + str(i)].append(sub_att)
    logging.debug("  GET DOM RULES : uni_dom_atts_to_par_atts_map = " +
                  str(uni_dom_atts_to_par_atts_map))

    logging.debug("  GET DOM RULES : ----------------------------------------")
    logging.debug("  GET DOM RULES : relationName         = " +
                  uni_ruleData["relationName"])
    logging.debug("  GET DOM RULES : goalAttList          = " +
                  str(uni_ruleData["goalAttList"]))
    logging.debug("  GET DOM RULES : negated_subgoal_atts = " +
                  str(negated_subgoal_atts))

    # get goal time arg
    uni_ruleData["goalTimeArg"] = ""

    # get eqn dict
    uni_ruleData["eqnDict"] = {}

    # =================================== #
    # get subgoal list of dicts

    # unidom rules encompass the subset of all tuples in the complenent of the
    # rule targetted for the DM rewrite which help generate data in the parent rule.
    # accordingly, copy over the contents of the parent rule and project the
    # attributes for the targeted subgoal(s).
    # constrain any remaining free goal variables with the actual contents of the
    # positive definition of the targetted rule.

    # 1. copy and edit over the list of parent subgoals

    parent_rule_meta = rid_to_rule_meta_map[parent_rid]
    uni_subgoalListOfDicts = copy.deepcopy(parent_rule_meta.subgoalListOfDicts)

    # replace subgoal references to the orig_ versions of the rule.
    for i in range(0, len(uni_subgoalListOfDicts)):
        if nw_tools.is_idb( uni_subgoalListOfDicts[ i ][ "subgoalName" ], ruleMeta ) and \
           not uni_subgoalListOfDicts[ i ][ "subgoalName" ].startswith( "not_" )     and \
           not uni_subgoalListOfDicts[ i ][ "subgoalName" ].startswith( "orig_" )     and \
           not uni_subgoalListOfDicts[ i ][ "subgoalName" ].startswith( "unidom_" )  and \
           not uni_subgoalListOfDicts[ i ][ "subgoalName" ].startswith( "exidom_" ) :
            uni_subgoalListOfDicts[ i ][ "subgoalName" ] = "orig_" + \
                                                           uni_subgoalListOfDicts[ i ][ "subgoalName" ]

    # replace atts in the parent subgoals with the goal atts
    # for the unidom rule.
    for gatt in uni_dom_atts_to_par_atts_map:
        these_par_atts = uni_dom_atts_to_par_atts_map[gatt]

        # iterate over parent subgoals
        for i in range(0, len(uni_subgoalListOfDicts)):
            sub = uni_subgoalListOfDicts[i]

            # iterate over the parent subgoal atts
            for j in range(0, len(sub["subgoalAttList"])):
                sub_att = sub["subgoalAttList"][j]

                # make the replacement if the parent sub att appears
                # in the atts corresponding to the unidom goal att
                # under consideration.
                if sub_att in these_par_atts:
                    uni_subgoalListOfDicts[i]["subgoalAttList"][j] = gatt

    logging.debug("  GET DOM RULES : subgoalListOfDicts = " +
                  str(uni_subgoalListOfDicts))

    # 2. integrate a reference to the original version of the targetted rule to fill
    #    in any missing attributes.

    # collect every att bound by a positive subgoal; goal atts not in this
    # set are unconstrained and need a reference to orig_<orig_name>.
    all_body_atts = []
    for sub in uni_subgoalListOfDicts:
        if sub["polarity"] == "":
            for satt in sub["subgoalAttList"]:
                if not satt in all_body_atts:
                    all_body_atts.append(satt)

    missing_gatts = []
    for gatt in uni_dom_atts_to_par_atts_map:
        if not gatt in all_body_atts:
            missing_gatts.append(gatt)

#  print "uni_dom_atts_to_par_atts_map = " + str( uni_dom_atts_to_par_atts_map )
#  print "all_body_atts = " + str( all_body_atts )
#  print "missing_gatts = " + str( missing_gatts )
#
#  if uni_ruleData[ "relationName" ] == "unidom_not_node_f23" :
#    sys.exit( "blah" )

    if len(missing_gatts) > 0:
        orig_sub = {}
        orig_sub["subgoalName"] = orig_name
        orig_sub["subgoalTimeArg"] = ""
        orig_sub["polarity"] = ""
        orig_sub["subgoalAttList"] = []
        for i in range(0, len(uni_dom_atts_to_par_atts_map)):
            if "A" + str(i) in missing_gatts:
                orig_sub["subgoalAttList"].append("A" + str(i))
            else:
                orig_sub["subgoalAttList"].append("_")
        uni_subgoalListOfDicts.append(orig_sub)

    uni_ruleData["subgoalListOfDicts"] = uni_subgoalListOfDicts

    # =================================== #
    # save rule

    # replace time arg with constant if the negated subgoal stems from post
    # NOTE(review): 'parent_name' is not defined anywhere in this function —
    # this branch raises NameError whenever POST_EOT_FILTER is True.
    # Presumably the parent rule's relation name was intended; confirm.
    if POST_EOT_FILTER and parent_name == "post":
        uni_ruleData["goalAttList"][-1] = argDict["EOT"]

    uni_rid = tools.getIDFromCounters("rid")
    uni_rule = copy.deepcopy(Rule.Rule(uni_rid, uni_ruleData, cursor))
    uni_rule.cursor = cursor  # need to do this for some reason or else cursor disappears?

    # set the unidom rule types manually
    uni_goal_types = []
    for rule in ruleMeta:
        if rule.rid == orig_rid:
            uni_goal_types = rule.goal_att_type_list
    assert (len(uni_goal_types) > 0)

    uni_rule.goal_att_type_list = uni_goal_types
    uni_rule.manually_set_types()

    # check if a rule already exists
    # to prevent duplicates.
    if not nw_tools.identical_rule_already_exists(uni_rule, ruleMeta):
        newRules.append(uni_rule)
        logging.debug( "  GET DOM RULES : added uni dom rule :\n     " + \
                       dumpers.reconstructRule( uni_rule.rid, uni_rule.cursor ) )
    else:
        logging.debug( "  GET DOM RULES : NOT adding uni dom rule :\n     " + \
                       dumpers.reconstructRule( uni_rule.rid, uni_rule.cursor ) )

#  if uni_rule.relationName == "unidom_not_node_f40" :
#    print orig_name
#    print not_name
#    print dumpers.reconstructRule( parent_rid, uni_rule.cursor )
#    print dumpers.reconstructRule( uni_rule.rid, uni_rule.cursor )
#    sys.exit( "blah" )

# ------------------------------------------ #
# build the existential domain rule

# exidom_ encompasses the set of data from the original version
# of the target rule which contributes to generating data in
# the original version of the target relation.
# accordingly, one exidom_ rule exists per rule in the definition.

# get the list of rules defining the target relation
    target_rules = []
    for rule in ruleMeta:
        if rule.relationName == "orig_" + orig_name:
            target_rules.append(rule)

    for target_rule in target_rules:

        exi_ruleData = {}

        # get relation name
        # need the extra _f to maintain arities.
        exi_ruleData["relationName"] = "exidom_" + not_name + "_f" + str(
            target_rule.rid)

        # grab all existential vars from the original definition for the
        # target relation.
        all_exi_vars = []
        for sub in target_rule.subgoalListOfDicts:
            for satt in sub["subgoalAttList"]:
                if not satt in target_rule.goalAttList and \
                   not satt in all_exi_vars            and \
                   not satt == "_" :
                    all_exi_vars.append(satt)

        # only write an exidom_ rule if existential vars exist.
        if len(all_exi_vars) > 0:

            #get goal atts
            exi_ruleData["goalAttList"] = copy.deepcopy(all_exi_vars)

            # get goal time arg
            exi_ruleData["goalTimeArg"] = ""

            # get eqn dict
            exi_ruleData["eqnDict"] = {}

            # =================================== #
            # get subgoals

            exi_subgoalListOfDicts = copy.deepcopy(
                target_rule.subgoalListOfDicts)
            for i in range(0, len(exi_subgoalListOfDicts)):
                sub = exi_subgoalListOfDicts[i]
                if not sub[ "subgoalName" ].startswith( "orig_" )   and \
                   not sub[ "subgoalName" ] == "clock"              and \
                   not sub[ "subgoalName" ] == "next_clock"         and \
                   not sub[ "subgoalName" ] == "crash"              and \
                   not sub[ "subgoalName" ].startswith( "not_" )    and \
                   not sub[ "subgoalName" ].startswith( "unidom_" ) and \
                   not sub[ "subgoalName" ].startswith( "exidom_" ) and \
                   nw_tools.is_idb( sub[ "subgoalName" ], ruleMeta ) :
                    exi_subgoalListOfDicts[i][
                        "subgoalName"] = "orig_" + sub["subgoalName"]

            exi_ruleData["subgoalListOfDicts"] = exi_subgoalListOfDicts

            # =================================== #
            # save rule

            exi_rid = tools.getIDFromCounters("rid")
            exi_rule = copy.deepcopy(Rule.Rule(exi_rid, exi_ruleData, cursor))
            exi_rule.cursor = cursor  # need to do this for some reason or else cursor disappears?

            # set the unidom rule types manually
            exi_goal_types = []
            for gatt in exi_rule.goalAttList:
                for sub in exi_subgoalListOfDicts:
                    if gatt in sub["subgoalAttList"]:
                        gatt_index = sub["subgoalAttList"].index(gatt)
                        for rule in ruleMeta:
                            if rule.relationName == sub["subgoalName"]:
                                exi_goal_types.append(
                                    rule.goal_att_type_list[gatt_index])
            # NOTE(review): this re-asserts uni_goal_types (already checked
            # above, so it is a no-op here); likely exi_goal_types was
            # intended — confirm before changing, since that would be a
            # behavioral change.
            assert (len(uni_goal_types) > 0)

            exi_rule.goal_att_type_list = exi_goal_types
            exi_rule.manually_set_types()

            # check if a rule already exists
            # to prevent duplicates.
            # NOTE(review): the uni branch passes ruleMeta as the second
            # argument here, but this call passes cursor — confirm the
            # expected signature of nw_tools.identical_rule_already_exists.
            if not nw_tools.identical_rule_already_exists(exi_rule, cursor):
                newRules.append(exi_rule)
                logging.debug( "  GET DOM RULES : added exi dom rule :\n     " + \
                               dumpers.reconstructRule( exi_rule.rid, exi_rule.cursor ) )
            else:
                logging.debug( "  GET DOM RULES : NOT adding exi dom rule :\n     " + \
                               dumpers.reconstructRule( exi_rule.rid, exi_rule.cursor ) )

    logging.debug("  GET DOM RULES : domain rules:")
    for rule in newRules:
        logging.debug("     " + dumpers.reconstructRule(rule.rid, rule.cursor))

    #if uni_ruleData[ "relationName" ] == "unidom_not_node_f23" :
    #  for rule in newRules :
    #    print dumpers.reconstructRule( rule.rid, rule.cursor )
    #  sys.exit( "blah" )

    return newRules
示例#22
0
import inspect, os, string, sys, traceback
from pyparsing import *

# ------------------------------------------------------ #
# import sibling packages HERE!!!
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import tools
# ------------------------------------------------------ #

#############
#  GLOBALS  #
#############
DEDALUSPARSER_DEBUG = tools.getConfig("DEDT", "DEDALUSPARSER_DEBUG", bool)

keywords = ["notin"]  # TODO: make this configurable


##################
#  CLEAN RESULT  #
##################
# input pyparse object of the form ([...], {...})
# output only [...]
def cleanResult(result):
    newResult = []

    numParsedStrings = len(result)
    for i in range(0, numParsedStrings):
        newResult.append(result[i])
示例#23
0
import inspect, os, sys

# ------------------------------------------------------ #
# import sibling packages HERE!!!
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import extractors, tools
import dedalusParser
import Rule
# ------------------------------------------------------ #

#############
#  GLOBALS  #
#############
PROVENANCEREWRITE_DEBUG = tools.getConfig("DEDT", "PROVENANCEREWRITE_DEBUG",
                                          bool)
aggOps = ["min<", "max<", "sum<", "avg<",
          "count<"]  # TODO: make this configurable

timeAtt = "SndTime"


##############
#  AGG PROV  #
##############
def aggProv(aggRule, nameAppend, cursor):

    # create bindings rule (see LDFI paper section 4.1.2)
    bindingsRule = regProv(aggRule, nameAppend, cursor)

    # generate random ID
示例#24
0
def sanityCheckSyntax_rule_postChecks(ruleLine, ruleData, settings_path):
    """Run post-parse sanity checks over a single parsed dedalus rule.

    ruleLine      : the raw rule string (used only in error messages).
    ruleData      : parsed rule dict with keys 'goalTimeArg', 'goalAttList',
                    'subgoalListOfDicts', and 'relationName'.
    settings_path : path to the settings .ini file consulted for USE_HACKS.

    Returns True when every check passes; otherwise terminates the program
    via sys.exit with a descriptive error message.
    """

    # ------------------------------------------ #
    # make sure all subgoals in next and async
    # rules have identical first attributes

    try:
        use_hacks = tools.getConfig(settings_path, "DEFAULT", "USE_HACKS",
                                    bool)
        if use_hacks:
            if ruleData["goalTimeArg"] == "next":
                check_identical_first_atts(ruleLine, ruleData)
        else:
            check_min_one_pos_subgoal_no_time_arg(ruleData)
            if ruleData["goalTimeArg"] == "next" or ruleData[
                    "goalTimeArg"] == "async":
                check_identical_first_atts(ruleLine, ruleData)

    except ConfigParser.NoOptionError:
        logging.warning(
            "WARNING : no 'USE_HACKS' defined in 'DEFAULT' section of settings.ini ...running without wildcard rewrites."
        )

        # BUG FIX : this call previously passed ( ruleLine, ruleData ) while
        # the non-exception path above passes only ( ruleData ); one of the
        # two arities must be wrong. normalized to the common-path form.
        # TODO(review) : confirm against the definition of
        # check_min_one_pos_subgoal_no_time_arg.
        check_min_one_pos_subgoal_no_time_arg(ruleData)
        if ruleData["goalTimeArg"] == "next" or ruleData[
                "goalTimeArg"] == "async":
            check_identical_first_atts(ruleLine, ruleData)

    # ------------------------------------------ #
    # make sure all goal and subgoal attribute
    # variables start with a capital letter

    goalAttList = ruleData["goalAttList"]
    for att in goalAttList:
        if not att[0].isalpha() or not att[0].isupper():
            if not hasAgg(att):  # att is not an aggregate call
                if not isFixedStr(att):  # att is not a fixed data input
                    if not isFixedInt(att):  # att is not a fixed data input
                        # BUG FIX : removed duplicated word "contains contains"
                        # from the error message.
                        sys.exit(
                            "  SANITY CHECK SYNTAX RULE : ERROR : invalid syntax in line '"
                            + ruleLine +
                            "'\n    the goal contains an attribute not starting with a capitalized letter: '"
                            + att +
                            "'. \n    attribute variables must start with an upper case letter."
                        )

    subgoalListOfDicts = ruleData["subgoalListOfDicts"]
    for sub in subgoalListOfDicts:

        subgoalAttList = sub["subgoalAttList"]
        for att in subgoalAttList:

            if not att[0].isalpha() or not att[0].isupper():
                if not hasAgg(att):  # att is not an aggregate call
                    if not isFixedStr(att):  # att is not a fixed data input
                        if not isFixedInt(
                                att):  # att is not a fixed data input
                            # subgoals can have wildcards
                            if not att[0] == "_":
                                sys.exit(
                                    "  SANITY CHECK SYNTAX RULE : ERROR : invalid syntax in line '"
                                    + ruleLine + "'\n    subgoal '" +
                                    sub["subgoalName"] +
                                    "' contains an attribute not starting with a capitalized letter: '"
                                    + att +
                                    "'. \n    attribute variables must start with an upper case letter."
                                )

    # ------------------------------------------ #
    # make sure all relation names are
    # lower case

    goalName = ruleData["relationName"]
    for c in goalName:
        if c.isalpha() and not c.islower():
            sys.exit(
                "  SANITY CHECK SYNTAX RULE : ERROR : invalid syntax in line '"
                + ruleLine + "'\n    The goal name '" + goalName +
                "' contains an upper-case letter. \n    relation names must contain only lower-case characters."
            )

    subgoalListOfDicts = ruleData["subgoalListOfDicts"]
    for sub in subgoalListOfDicts:
        subName = sub["subgoalName"]
        for c in subName:
            if c.isalpha() and not c.islower():
                sys.exit(
                    "  SANITY CHECK SYNTAX RULE : ERROR : invalid syntax in line '"
                    + ruleLine + "'\n    The subgoal name '" + subName +
                    "' contains an upper-case letter. \n    relation names must contain only lower-case characters."
                )

    return True
示例#25
0
文件: GoalNode.py 项目: dotnwat/orik
# standard python packages
import inspect, os, sys

import DerivTree, RuleNode, FactNode, provTools

if not os.path.abspath(__file__ + "/..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/.."))
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import tools
from Node import Node

# **************************************** #

DEBUG = tools.getConfig("DERIVATION", "GOALNODE_DEBUG", bool)


class GoalNode(Node):

    #####################
    #  SPECIAL ATTRIBS  #
    #####################
    descendants = []
    name = None
    isNeg = None
    seedRecord = None
    results = []

    #################
    #  CONSTRUCTOR  #
示例#26
0
def c4datalog(argDict, cursor):
    """Translate the IR database contents into a runnable C4 datalog program.

    argDict : argument/config dictionary; must map "settings" to the
              settings-file path (read for NEXT_RULE_HANDLING).
    cursor  : database cursor over the IR tables
              (Rule, GoalAtt, Fact, FactData).

    Returns [ allProgramLines, tableListArray ] where allProgramLines is the
    ordered list of C4 statement strings (defines, rules, facts, clock facts)
    and tableListArray is the de-duplicated list of table names.
    """

    logging.debug("  C4 DATALOG : running process...")

    goalName = None
    provGoalNameOrig = None

    tableListStr = ""  # collect all table names delmited by a single comma only.
    tableListArray = []

    # ----------------------------------------------------------- #
    # create goal defines

    # get all rids
    cursor.execute("SELECT rid FROM Rule")
    ridList = cursor.fetchall()
    ridList = tools.toAscii_list(ridList)

    definesNames = []
    definesList = []
    # ////////////////////////////////////////////////////////// #
    # populate defines list for rules
    for rid in ridList:
        newDefine = ""

        # get goal name
        cursor.execute("SELECT goalName FROM Rule WHERE rid = '" + rid + "'")
        goalName = cursor.fetchone()
        goalName = tools.toAscii_str(goalName)

        # if it's a prov rule, get the original goal name
        provGoalNameOrig = None
        if "_prov" in goalName:
            provGoalNameOrig = goalName.split("_prov")
            provGoalNameOrig = provGoalNameOrig[0]

        # populate table information collection structures
        tableListStr += goalName + ","
        tableListArray.append(goalName)

        # ////////////////////////////////////////////////////////// #
        # populate defines list for rule goals
        logging.debug("In c4datalog: definesList = " + str(definesList))

        if not existingDefine(goalName, definesNames):  # prevent duplicates

            # get goal attribute list
            cursor.execute("SELECT attID,attType From GoalAtt WHERE rid = '" +
                           rid + "'")
            goalAttList = cursor.fetchall()
            goalAttList = tools.toAscii_multiList(goalAttList)

            logging.debug("* goalName = " + goalName + ", goalAttList " +
                          str(goalAttList))

            # populate type list for rule
            typeList = []
            for k in range(0, len(goalAttList)):
                att = goalAttList[k]
                attID = att[0]
                attType = att[1]

                typeList.append(attType)

            # populate new c4 define statement
            newDefine = ""
            newDefine += "define("
            newDefine += goalName
            newDefine += ",{"

            for i in range(0, len(typeList)):
                newDefine += typeList[i]
                if i < len(typeList) - 1:
                    newDefine += ","
                else:
                    newDefine += "});" + "\n"

            # save new c4 define statement
            if not newDefine in definesList:
                definesNames.append(goalName)
                definesList.append(newDefine)
        # ////////////////////////////////////////////////////////// #

    # ----------------------------------------------------------- #
    # create fact defines

    # get all fact ids
    cursor.execute("SELECT fid FROM Fact")
    fidList = cursor.fetchall()
    fidList = tools.toAscii_list(fidList)

    for fid in fidList:

        # get goal name
        cursor.execute("SELECT name FROM Fact WHERE fid = '" + fid + "'")
        factName = cursor.fetchone()
        factName = tools.toAscii_str(factName)

        logging.debug("**> factName = " + factName)

        logging.debug("In c4datalog: definesList = " + str(definesList))

        if not existingDefine(factName, definesNames):  # prevent duplicates

            # populate table string
            tableListStr += factName + ","
            tableListArray.append(factName)

            # get goal attribute list
            cursor.execute(
                "SELECT dataID,dataType From FactData WHERE fid = '" + fid +
                "'")
            factAttList = cursor.fetchall()
            factAttList = tools.toAscii_multiList(factAttList)

            logging.debug("* factName = " + factName + ", factAttList " +
                          str(factAttList))

            # populate type list for fact
            typeList = []
            for k in range(0, len(factAttList)):
                att = factAttList[k]
                attID = att[0]
                attType = att[1]

                typeList.append(attType)

            # check for time argument
            #cursor.execute( "SELECT timeArg FROM Fact WHERE fid='" + fid + "'" )
            #timeArg = cursor.fetchone()
            #timeArg = tools.toAscii_str( timeArg )

            #if timeArg :
            #  typeList.append( "int" )

            # populate new c4 define statement
            newDefine = ""
            newDefine += "define("
            newDefine += factName
            newDefine += ",{"

            for i in range(0, len(typeList)):
                newDefine += typeList[i]
                if i < len(typeList) - 1:
                    newDefine += ","
                else:
                    newDefine += "});" + "\n"

            # save new c4 define statement
            if not newDefine in definesList:
                definesNames.append(factName)
                definesList.append(newDefine)
    # ////////////////////////////////////////////////////////// #

    # ----------------------------------------------------------- #
    # add clock define

    definesList.append("define(clock,{string,string,int,int});\n")
    tableListStr += "clock,"
    tableListArray.append("clock")

    # ----------------------------------------------------------- #
    # add not_clock define

    #definesList.append( "define(not_clock,{string,string,int,int});\n" )
    #tableListStr += "not_clock,"
    #tableListArray.append( "not_clock" )

    # ----------------------------------------------------------- #
    # add crash define

    definesList.append("define(crash,{string,string,int,int});\n")
    tableListStr += "crash,"
    tableListArray.append("crash")

    # ----------------------------------------------------------- #
    # add facts

    cursor.execute("SELECT fid FROM Fact")
    fidList = cursor.fetchall()
    fidList = tools.toAscii_list(fidList)

    factList = []
    for fid in fidList:
        newFact = dumpers_c4.dumpSingleFact_c4(fid, cursor)
        factList.append(newFact)

    # ----------------------------------------------------------- #
    # add clock facts

    clockFactList = dumpers_c4.dump_clock(cursor)

    logging.debug("c4_translator: clockFactList = " + str(clockFactList))

    # ----------------------------------------------------------- #
    # add crash facts

    crashFactList = dumpers_c4.dump_crash(cursor)

    # ----------------------------------------------------------- #
    # add rules

    cursor.execute("SELECT rid FROM Rule")
    ridList = cursor.fetchall()
    ridList = tools.toAscii_list(ridList)

    ruleList = []
    for rid in ridList:

        # NOTE : data-type verification is currently disabled (the previous
        # version called tools.checkDataTypes, then guarded this append with
        # 'if True:' and kept an unreachable else branch referencing the
        # undefined names offensiveEqn / lhsType / rhsType, which would have
        # raised a NameError if ever executed). the dead scaffolding is
        # removed; re-enable verification by restoring the checkDataTypes
        # call and branching on its result.
        newRule = dumpers_c4.dumpSingleRule_c4(rid, cursor)
        ruleList.append(newRule)

    # ------------------------------------------------------ #
    # grab the next rule handling method

    try:
        NEXT_RULE_HANDLING = tools.getConfig( argDict[ "settings" ], \
                                              "DEFAULT", \
                                              "NEXT_RULE_HANDLING", \
                                              str )

    except ConfigParser.NoOptionError:
        # BUG FIX : log message previously misspelled the option as
        # 'NEXT_RULE_HANLDING'.
        logging.info(
            "WARNING : no 'NEXT_RULE_HANDLING' defined in 'DEFAULT' section of settings file."
        )
        # tools.bp is expected to abort execution here; otherwise
        # NEXT_RULE_HANDLING would be unbound below.
        tools.bp( __name__, inspect.stack()[0][3], \
                 "FATAL ERROR : NEXT_RULE_HANDLING parameter not specified in DEFAULT section of settings file. use 'USE_AGGS', 'SYNC_ASSUMPTION', or 'USE_NEXT_CLOCK' only." )

    # sanity check next rule handling value
    if NEXT_RULE_HANDLING == "USE_AGGS" or \
       NEXT_RULE_HANDLING == "SYNC_ASSUMPTION" or \
       NEXT_RULE_HANDLING == "USE_NEXT_CLOCK" :
        pass
    else:
        tools.bp( __name__, inspect.stack()[0][3], \
                  "FATAL ERROR : unrecognized NEXT_RULE_HANDLING value '" + NEXT_RULE_HANDLING + "'. use 'USE_AGGS', 'SYNC_ASSUMPTION', or 'USE_NEXT_CLOCK' only." )

    # ----------------------------------------------------------- #
    # add next_clock, if necessary

    if NEXT_RULE_HANDLING == "USE_NEXT_CLOCK":

        # ------------------------------------------------------ #
        # add define

        definesList.append("define(next_clock,{string,string,int,int});\n")
        tableListStr += "next_clock,"
        tableListArray.append("next_clock")

        # ------------------------------------------------------ #
        # add next_clock facts for all synchronous facts appearing clock

        next_clock_factList = []
        for cfact in clockFactList:
            if isSynchronous(cfact):
                next_clock_fact = "next_" + cfact
                next_clock_factList.append(next_clock_fact)

    # ----------------------------------------------------------- #
    # save table list

    logging.debug("*******************************************")
    logging.debug("table list str :\n" + str(tableListStr))
    logging.debug("table list array :\n" + str(tableListArray))

    # ----------------------------------------------------------- #
    # collect program statements

    logging.debug("*******************************************")
    logging.debug("definesList :\n" + str(definesList))
    logging.debug("*******************************************")
    logging.debug("factList :\n" + str(factList))
    logging.debug("*******************************************")
    logging.debug("ruleList :\n" + str(ruleList))

    # NOTE: listOfStatementLists controls the ordering of statements
    #       in the final c4 program.
    if NEXT_RULE_HANDLING == "USE_NEXT_CLOCK":
        listOfStatementLists = [ definesList, \
                                 ruleList, \
                                 factList, \
                                 crashFactList, \
                                 next_clock_factList, \
                                 clockFactList ]
    else:
        listOfStatementLists = [ definesList, \
                                 ruleList, \
                                 factList, \
                                 crashFactList, \
                                 clockFactList ]

    program = tools.combineLines(listOfStatementLists)

    # break down into list of individual statements
    allProgramLines = []
    for group in listOfStatementLists:
        for statement in group:
            allProgramLines.append(statement.rstrip())

    # remove duplicates
    tableListArray = set(tableListArray)
    tableListArray = list(tableListArray)

    logging.debug("  C4 DATALOG : ...done.")
    return [allProgramLines, tableListArray]
示例#27
0
# **************************************** #

#############
#  IMPORTS  #
#############
# standard python packages
import inspect, os, pydot, sys

if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import tools

# **************************************** #

DEBUG = tools.getConfig("DERIVATION", "PROVTOOLS_DEBUG", bool)
C4_RESULTS_PATH = os.path.abspath(__file__ +
                                  "/../../../save_data/c4Output/c4dump.txt")


#################
#  CREATE NODE  #
#################
def createNode(nodeToAdd):

    if nodeToAdd.treeType == "goal":
        thisNode = pydot.Node(str(nodeToAdd), shape='oval')

    elif nodeToAdd.treeType == "rule":
        thisNode = pydot.Node(str(nodeToAdd), shape='box')
示例#28
0
    def run(self, allProgramData):
        """Install and evaluate the generated C4 program via the C4 C library.

        allProgramData : [ allProgramLines, tableList ] where
          allProgramLines := list of every code line in the generated C4 program.
          tableList       := list of all tables in the generated C4 program.

        Side effects : optionally saves the full program to
        <data_save_path>/full_program.olg and clears the configured c4_home
        scratch directory. Raises ConfigParser.NoOptionError when
        C4_HOME_PATH is missing from the settings file.

        Returns the results array gathered from the C4 table dump.
        """

        allProgramLines = allProgramData[
            0]  # := list of every code line in the generated C4 program.
        tableList = allProgramData[
            1]  # := list of all tables in generated C4 program.

        # get full program
        fullprog = self.getInputProg_one_group_for_everything_besides_clocks_and_group_clocks_by_sndTime(
            allProgramLines)

        # ----------------------------------------- #
        # outputs are good

        logging.debug("PRINTING RAW INPUT PROG")
        for x in fullprog:
            logging.debug(x)

        logging.debug("PRINTING LEGIBLE INPUT PROG")
        for line in fullprog:
            line = line.split(";")
            for statement in line:
                statement = statement.rstrip()
                if not statement == "":
                    statement = statement + ";"
                    logging.debug(statement)

        if os.path.isdir(self.argDict["data_save_path"]):
            filename = self.argDict["data_save_path"] + "/full_program.olg"
            logging.debug("SAVING TO FILE at path : " + filename)
            # BUG FIX : use a context manager so the file handle is closed
            # even if a write raises (previously a bare open/close pair).
            with open(filename, "w") as fo:
                for line in fullprog:
                    line = line.split(";")
                    for statement in line:
                        statement = statement.rstrip()
                        if not statement == "":
                            statement = statement + ";"
                            fo.write(statement + "\n")

        # ----------------------------------------- #
        # initialize c4 instance

        self.lib.c4_initialize()
        self.c4_obj = self.lib.c4_make(None, 0)

        # ---------------------------------------- #
        # load program

        logging.debug("... loading prog ...")

        for subprog in fullprog:
            logging.debug("SUBMITTING SUBPROG : ")
            logging.debug(subprog)
            # NOTE(review) : bytes(subprog) assumes python2 (str == bytes);
            # under python3 this needs an explicit encoding -- confirm the
            # target interpreter before changing.
            c_prog = bytes(subprog)
            logging.debug("...completed bytes conversion...")
            self.lib.c4_install_str(self.c4_obj, c_prog)
            logging.debug("...done installing str.")

        # ---------------------------------------- #
        # dump program results to file

        logging.debug("... dumping program ...")

        results_array = self.saveC4Results_toArray(tableList)

        # ---------------------------------------- #
        # close c4 program

        logging.debug("... closing C4 ...")

        self.lib.c4_destroy(self.c4_obj)
        self.lib.c4_terminate()

        # clear the c4_home scratch directory for the next run.
        try:
            C4_HOME_PATH = tools.getConfig(self.argDict["settings"], "DEFAULT",
                                           "C4_HOME_PATH", str)
            try:
                # for safety:
                C4_HOME_PATH = C4_HOME_PATH.replace("/c4_home", "")
                C4_HOME_PATH = C4_HOME_PATH.replace("//", "")

                assert (os.path.isdir(C4_HOME_PATH) == True)
                os.system("rm -rf " + C4_HOME_PATH + "/c4_home/*")

            except AssertionError:
                raise AssertionError(C4_HOME_PATH + " does not exist.")

        except ConfigParser.NoOptionError as e:
            logging.info(
                "  FATAL ERROR : option 'C4_HOME_PATH' not set in settings file '"
                + self.argDict["settings"] + "'. aborting.")
            raise e

        return results_array
示例#29
0
文件: FactNode.py 项目: dotnwat/orik
#  IMPORTS  #
#############
# standard python packages
import inspect, os, sys

# make the package directory and its parent importable.
if not os.path.abspath(__file__ + "/..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/.."))
# BUG FIX : previously appended the undefined name 'packagePath2' (NameError
# at import time); append the parent-of-parent directory instead, matching
# the guard condition above it and the identical preamble used by the
# sibling modules in this project.
if not os.path.abspath(__file__ + "/../..") in sys.path:
    sys.path.append(os.path.abspath(__file__ + "/../.."))

from utils import tools
from Node import Node

# **************************************** #

DEBUG = tools.getConfig("DERIVATION", "FACTNODE_DEBUG", bool)


class FactNode(Node):

    ########################
    #  SPECIAL ATTRIBUTES  #
    ########################
    triggerRecord = None

    #################
    #  CONSTRUCTOR  #
    #################
    def __init__(self, name, isNeg, record, results, cursor):

        # NODE CONSTRUCTOR: treeType, name, isNeg, record, program results, dbcursor
示例#30
0
def aggProv(aggRule, provid, cursor, argDict):
    """Generate provenance rules for an aggregate rule (LDFI sec. 4.1.2).

    aggRule : the Rule object for the original aggregate rule; REWRITTEN IN
              PLACE (its subgoal list and equation dict are replaced and
              re-saved through the cursor).
    provid  : integer appended to the "_prov" relation name for uniqueness.
    cursor  : IR database cursor handed to the new Rule instances.
    argDict : argument dict; argDict["settings"] is read for USING_MOLLY.

    Returns [ bindings_rule, aggprovmeta_rule ] -- the new "_vars" bindings
    rule and the new "_prov<provid>" aggregate provenance rule.
    """

    logging.debug("  AGG PROV : running aggProv...")

    # USING_MOLLY toggles molly-compatible goal-attribute ordering below;
    # default to False when the option is absent from the settings file.
    try:
        USING_MOLLY = tools.getConfig(argDict["settings"], "DEFAULT",
                                      "USING_MOLLY", bool)
    except ConfigParser.NoOptionError:
        logging.warning(
            "WARNING : no 'USING_MOLLY' defined in 'DEFAULT' section of settings.ini ...assume running without molly."
        )
        USING_MOLLY = False

    # snapshot the original goal attributes before aggRule is rewritten.
    orig_aggRule_goalAttList = aggRule.ruleData["goalAttList"]

    logging.debug("  AGG PROV : orig_aggRule_goalAttList = " +
                  str(orig_aggRule_goalAttList))

    # ------------------------------------------------------ #
    #                 BUILD THE BINDINGS RULE                #
    # ------------------------------------------------------ #

    # ------------------------------------------------------ #
    # generate a random ID for the new provenance rule

    bindings_rid = tools.getIDFromCounters("rid")

    # ------------------------------------------------------ #
    # initialize the prov rule to old version of
    # meta rule (shallow copy of the rule-data dict)

    bindingsmeta_ruleData = {}
    for key in aggRule.ruleData:
        val = aggRule.ruleData[key]
        bindingsmeta_ruleData[key] = val

    logging.debug("  AGG PROV : bindingsmeta_ruleData = " +
                  str(bindingsmeta_ruleData))

    # ------------------------------------------------------ #
    # the bindings rule name gets a "_vars" suffix
    #
    # NOTE!!!! LDFI paper says "_bindings", but molly implementation actually uses "_vars" append. >~<

    #bindingsmeta_ruleData[ "relationName" ] = bindingsmeta_ruleData[ "relationName" ] + "_bindings" + str( provid )
    bindingsmeta_ruleData[
        "relationName"] = bindingsmeta_ruleData["relationName"] + "_vars"

    # ------------------------------------------------------ #
    # the goal att list consists of all subgoal atts

    bindings_goalAttList = []

    # grab all goal atts, minus any aggregate wrappers
    old_bindings_goalAttList = bindingsmeta_ruleData["goalAttList"]
    bindings_goalAttList = getAllGoalAtts_noAggs(old_bindings_goalAttList)

    # extract and save the time argument as the last element in the attribute list
    bindings_goalAttList_last = bindings_goalAttList[-1]
    bindings_goalAttList = bindings_goalAttList[:-1]

    # grab all subgoal atts
    subgoalListOfDicts = bindingsmeta_ruleData["subgoalListOfDicts"]

    logging.debug("  AGG PROV : subgoalListOfDicts = " +
                  str(subgoalListOfDicts))

    for subgoal in subgoalListOfDicts:
        subgoalAttList = subgoal["subgoalAttList"]
        for att in subgoalAttList:

            # don't duplicate atts in the prov head
            if not att in bindings_goalAttList:

                # do not add wildcards and fixed integer inputs
                if not att == "_" and not att.isdigit():

                    # do not add fixed string inputs
                    if not isFixedString(att):
                        bindings_goalAttList.append(att)

    # add the time argument last
    if not bindings_goalAttList_last in bindings_goalAttList:
        bindings_goalAttList.append(bindings_goalAttList_last)

    # save to rule data; molly expects a particular attribute ordering
    if USING_MOLLY:
        bindings_goalAttList = sortGoalAttList(bindings_goalAttList)
    bindingsmeta_ruleData["goalAttList"] = bindings_goalAttList

    # ------------------------------------------------------ #
    # preserve adjustments by instantiating the new meta rule
    # as a Rule (the constructor persists the rule via the cursor)

    bindings_rule = Rule.Rule(bindings_rid, bindingsmeta_ruleData, cursor)
    bindings_rule.rule_type = aggRule.rule_type

    # ------------------------------------------------------ #
    #              BUILD THE AGG PROVENANCE RULE             #
    # ------------------------------------------------------ #

    # ------------------------------------------------------ #
    # generate a random ID for the new provenance rule

    aggprovmeta_rid = tools.getIDFromCounters("rid")

    # ------------------------------------------------------ #
    # initialize rule data

    aggprovmeta_ruleData = {}

    # ------------------------------------------------------ #
    # the provenance rule name ends with "_prov" appended
    # with a unique number

    aggprovmeta_ruleData["relationName"] = aggRule.ruleData[
        "relationName"] + "_prov" + str(provid)

    # ------------------------------------------------------ #
    # the goal att list mirrors the original aggregate rule's head

    if USING_MOLLY:
        orig_aggRule_goalAttList = sortGoalAttList(orig_aggRule_goalAttList)

    aggprovmeta_ruleData["goalAttList"] = orig_aggRule_goalAttList

    # ------------------------------------------------------ #
    # define goal time arg as empty

    aggprovmeta_ruleData["goalTimeArg"] = ""

    # ------------------------------------------------------ #
    # define subgoal list of dicts
    # agg prov rules only have one subgoal in the head of
    # the previously defined bindings rule

    subgoalListOfDicts = []
    bindings_subgoal = {}
    bindings_subgoal["subgoalName"] = bindingsmeta_ruleData["relationName"]

    # replace all existential vars in the subgoal att list with wildcards
    allGoalAtts = getAllGoalAtts_noAggs(aggprovmeta_ruleData["goalAttList"])
    allSubgoalAtts = bindingsmeta_ruleData["goalAttList"]

    subgoalAttList = []
    for att in allSubgoalAtts:
        if not att in allGoalAtts:
            subgoalAttList.append("_")
        else:
            subgoalAttList.append(att)

    bindings_subgoal["subgoalAttList"] = subgoalAttList
    bindings_subgoal["polarity"] = ""
    bindings_subgoal["subgoalTimeArg"] = ""

    subgoalListOfDicts.append(bindings_subgoal)
    aggprovmeta_ruleData["subgoalListOfDicts"] = subgoalListOfDicts

    # ------------------------------------------------------ #
    # define eqnDict as empty

    aggprovmeta_ruleData["eqnDict"] = {}

    # ------------------------------------------------------ #
    # preserve adjustments by instantiating the new meta rule
    # as a Rule

    aggprovmeta_rule = Rule.Rule(aggprovmeta_rid, aggprovmeta_ruleData, cursor)
    aggprovmeta_rule.rule_type = aggRule.rule_type

    # ------------------------------------------------------ #
    #               REWRITE ORIGINAL AGG RULE                #
    # ------------------------------------------------------ #

    # ------------------------------------------------------ #

    # update rule meta with the new bindings subgoal
    aggRule.ruleData["subgoalListOfDicts"] = aggprovmeta_rule.ruleData[
        "subgoalListOfDicts"]
    aggRule.subgoalListOfDicts = aggRule.ruleData["subgoalListOfDicts"]

    # save new subgoal data
    aggRule.saveSubgoals()

    # ------------------------------------------------------ #

    # update rule meta with the new empty eqn dict
    aggRule.ruleData["eqnDict"] = aggprovmeta_rule.ruleData["eqnDict"]
    aggRule.eqnDict = aggRule.ruleData["eqnDict"]

    # save new subgoal data
    aggRule.saveEquations()

    return [bindings_rule, aggprovmeta_rule]