def parse_string(self, contents:str)->HTS: ''' Parses a string representation of an initial state file ''' hts = HTS("INIT") ts = TS("TS INIT") init = [] for line in contents.split('\n'): line = line.strip() if not line: continue else: res = self.parse_line(line) if res is not None: init.append(res) Logger.msg("Initial state file set concrete values for {} state variables".format(len(init)), 1) ts.init = And(init) ts.invar = TRUE() ts.trans = TRUE() hts.add_ts(ts) return hts
def __solve_problem(self, hts: HTS, prop: Optional[FNode], lemmas: Optional[List[FNode]], assumptions: Optional[List[FNode]], problem: NamedTuple) -> str: trace = None traces = None region = None # only used for parametric model checking accepted_ver = False assert hts.assumptions is None, "There should not be any left-over assumptions from previous problems" for assump in assumptions: hts.add_assumption(assump) for lemma in lemmas: hts.add_lemma(lemma) bmc_safety = BMCSafety(hts, problem) bmc_parametric = BMCParametric(hts, problem) bmc_ltl = BMCLTL(hts, problem) res = VerificationStatus.UNC bmc_length = problem.bmc_length bmc_length_min = problem.bmc_length_min if problem.verification == VerificationType.SAFETY: accepted_ver = True Logger.log("Property: %s" % (prop.serialize(threshold=100)), 2) res, trace, _ = bmc_safety.safety(prop, bmc_length, bmc_length_min, problem.processes) if problem.verification == VerificationType.LTL: accepted_ver = True res, trace, _ = bmc_ltl.ltl(prop, bmc_length, bmc_length_min) if problem.verification == VerificationType.SIMULATION: accepted_ver = True res, trace = bmc_safety.simulate(prop, bmc_length) if problem.verification == VerificationType.PARAMETRIC: accepted_ver = True Logger.log("Property: %s" % (prop.serialize(threshold=100)), 2) res, traces, region = bmc_parametric.parametric_safety( prop, bmc_length, bmc_length_min, ModelExtension.get_parameters(hts), at_most=problem.cardinality) if problem.verification == VerificationType.EQUIVALENCE: accepted_ver = True bmcseq = BMCSafety(hts, problem) res, trace, t = bmcseq.safety(prop, bmc_length, bmc_length_min) if not accepted_ver: Logger.error("Invalid verification type") Logger.log("\n*** Problem \"%s\" is %s ***" % (problem.name, res), 1) return res, trace, traces, region
def parse_file(self, filepath:Path, config:NamedTuple, flags:str=None)->Tuple[HTS, List[FNode], List[FNode]]: ''' Reads an initial state file and produces (HTS, invariants, ltl_invariants) ''' hts = HTS(filepath.name) ts = TS("TS %s"%filepath.name) init = [] with filepath.open("r") as f: hts = self.parse_string(f.read()) return hts, None, None
def parse_string(self, strinput): hts = HTS() ts = TS() nodemap = {} node_covered = set([]) # list of tuples of var and cond_assign_list # cond_assign_list is tuples of (condition, value) # where everything is a pysmt FNode # for btor, the condition is always True ftrans = [] initlist = [] invarlist = [] invar_props = [] ltl_props = [] prop_count = 0 # clean string input, remove special characters from names for sc, rep in special_char_replacements.items(): strinput = strinput.replace(sc, rep) def getnode(nid): node_covered.add(nid) if int(nid) < 0: return Ite(BV2B(nodemap[str(-int(nid))]), BV(0, 1), BV(1, 1)) return nodemap[nid] def binary_op(bvop, bop, left, right): if (get_type(left) == BOOL) and (get_type(right) == BOOL): return bop(left, right) return bvop(B2BV(left), B2BV(right)) def unary_op(bvop, bop, left): if (get_type(left) == BOOL): return bop(left) return bvop(left) for line in strinput.split(NL): linetok = line.split() if len(linetok) == 0: continue if linetok[0] == COM: continue (nid, ntype, *nids) = linetok if ntype == SORT: (stype, *attr) = nids if stype == BITVEC: nodemap[nid] = BVType(int(attr[0])) node_covered.add(nid) if stype == ARRAY: nodemap[nid] = ArrayType(getnode(attr[0]), getnode(attr[1])) node_covered.add(nid) if ntype == WRITE: nodemap[nid] = Store(*[getnode(n) for n in nids[1:4]]) if ntype == READ: nodemap[nid] = Select(getnode(nids[1]), getnode(nids[2])) if ntype == ZERO: nodemap[nid] = BV(0, getnode(nids[0]).width) if ntype == ONE: nodemap[nid] = BV(1, getnode(nids[0]).width) if ntype == ONES: width = getnode(nids[0]).width nodemap[nid] = BV((2**width) - 1, width) if ntype == REDOR: width = get_type(getnode(nids[1])).width zeros = BV(0, width) nodemap[nid] = BVNot(BVComp(getnode(nids[1]), zeros)) if ntype == REDAND: width = get_type(getnode(nids[1])).width ones = BV((2**width) - 1, width) nodemap[nid] = BVComp(getnode(nids[1]), ones) if ntype == CONSTD: width = getnode(nids[0]).width nodemap[nid] = BV(int(nids[1]), width) if ntype == CONST: width = getnode(nids[0]).width try: nodemap[nid] = BV(bin_to_dec(nids[1]), width) except ValueError: if not all([i == 'x' or i == 'z' for i in nids[1]]): raise RuntimeError( "If not a valid number, only support " "all don't cares or high-impedance but got {}". 
format(nids[1])) # create a fresh variable for this non-deterministic constant nodemap[nid] = Symbol('const_' + nids[1], BVType(width)) ts.add_state_var(nodemap[nid]) Logger.warning( "Creating a fresh symbol for unsupported X/Z constant %s" % nids[1]) if ntype == STATE: if len(nids) > 1: nodemap[nid] = Symbol(nids[1], getnode(nids[0])) else: nodemap[nid] = Symbol((SN % nid), getnode(nids[0])) ts.add_state_var(nodemap[nid]) if ntype == INPUT: if len(nids) > 1: nodemap[nid] = Symbol(nids[1], getnode(nids[0])) else: nodemap[nid] = Symbol((SN % nid), getnode(nids[0])) ts.add_input_var(nodemap[nid]) if ntype == OUTPUT: # unfortunately we need to create an extra symbol just to have the output name # we could be smarter about this, but then this parser can't be greedy original_symbol = B2BV(getnode(nids[0])) output_symbol = Symbol(nids[1], original_symbol.get_type()) nodemap[nid] = EqualsOrIff(output_symbol, original_symbol) invarlist.append(nodemap[nid]) node_covered.add(nid) ts.add_output_var(output_symbol) if ntype == AND: nodemap[nid] = binary_op(BVAnd, And, getnode(nids[1]), getnode(nids[2])) if ntype == CONCAT: nodemap[nid] = BVConcat(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == XOR: nodemap[nid] = binary_op(BVXor, Xor, getnode(nids[1]), getnode(nids[2])) if ntype == XNOR: nodemap[nid] = BVNot( binary_op(BVXor, Xor, getnode(nids[1]), getnode(nids[2]))) if ntype == NAND: bvop = lambda x, y: BVNot(BVAnd(x, y)) bop = lambda x, y: Not(And(x, y)) nodemap[nid] = binary_op(bvop, bop, getnode(nids[1]), getnode(nids[2])) if ntype == IMPLIES: nodemap[nid] = BVOr(BVNot(getnode(nids[1])), getnode(nids[2])) if ntype == NOT: nodemap[nid] = unary_op(BVNot, Not, getnode(nids[1])) if ntype == NEG: nodemap[nid] = unary_op(BVNeg, Not, getnode(nids[1])) if ntype == UEXT: nodemap[nid] = BVZExt(B2BV(getnode(nids[1])), int(nids[2])) if ntype == SEXT: nodemap[nid] = BVSExt(B2BV(getnode(nids[1])), int(nids[2])) if ntype == OR: nodemap[nid] = binary_op(BVOr, Or, getnode(nids[1]), getnode(nids[2])) if ntype == ADD: nodemap[nid] = BVAdd(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == SUB: nodemap[nid] = BVSub(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == UGT: nodemap[nid] = BVUGT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == UGTE: nodemap[nid] = BVUGE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == ULT: nodemap[nid] = BVULT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == ULTE: nodemap[nid] = BVULE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == SGT: nodemap[nid] = BVSGT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == SGTE: nodemap[nid] = BVSGE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == SLT: nodemap[nid] = BVSLT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == SLTE: nodemap[nid] = BVSLE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == EQ: nodemap[nid] = BVComp(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == NEQ: nodemap[nid] = BVNot(BVComp(getnode(nids[1]), getnode(nids[2]))) if ntype == MUL: nodemap[nid] = BVMul(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == SLICE: nodemap[nid] = BVExtract(B2BV(getnode(nids[1])), int(nids[3]), int(nids[2])) if ntype == SLL: nodemap[nid] = BVLShl(getnode(nids[1]), getnode(nids[2])) if ntype == SRA: nodemap[nid] = BVAShr(getnode(nids[1]), getnode(nids[2])) if ntype == SRL: nodemap[nid] = BVLShr(getnode(nids[1]), getnode(nids[2])) if ntype == ITE: if (get_type(getnode(nids[2])) == BOOL) or (get_type( getnode(nids[3])) == 
BOOL): nodemap[nid] = Ite(BV2B(getnode(nids[1])), B2BV(getnode(nids[2])), B2BV(getnode(nids[3]))) else: nodemap[nid] = Ite(BV2B(getnode(nids[1])), getnode(nids[2]), getnode(nids[3])) if ntype == NEXT: if (get_type(getnode(nids[1])) == BOOL) or (get_type( getnode(nids[2])) == BOOL): lval = TS.get_prime(getnode(nids[1])) rval = B2BV(getnode(nids[2])) else: lval = TS.get_prime(getnode(nids[1])) rval = getnode(nids[2]) nodemap[nid] = EqualsOrIff(lval, rval) ftrans.append((lval, [(TRUE(), rval)])) if ntype == INIT: if (get_type(getnode(nids[1])) == BOOL) or (get_type( getnode(nids[2])) == BOOL): nodemap[nid] = EqualsOrIff(BV2B(getnode(nids[1])), BV2B(getnode(nids[2]))) elif get_type(getnode(nids[1])).is_array_type(): _type = get_type(getnode(nids[1])) nodemap[nid] = EqualsOrIff( getnode(nids[1]), Array(_type.index_type, default=getnode(nids[2]))) else: nodemap[nid] = EqualsOrIff(getnode(nids[1]), getnode(nids[2])) initlist.append(getnode(nid)) if ntype == CONSTRAINT: nodemap[nid] = BV2B(getnode(nids[0])) invarlist.append(getnode(nid)) if ntype == BAD: nodemap[nid] = getnode(nids[0]) if len(nids) > 1: assert_name = nids[1] description = "Embedded assertion: {}".format(assert_name) else: assert_name = 'embedded_assertion_%i' % prop_count description = 'Embedded assertion number %i' % prop_count prop_count += 1 # Following problem format (name, description, strformula) invar_props.append( (assert_name, description, Not(BV2B(getnode(nid))))) if nid not in nodemap: Logger.error("Unknown node type \"%s\"" % ntype) # get wirename if it exists if ntype not in {STATE, INPUT, OUTPUT, BAD}: # disregard comments at the end of the line try: symbol_idx = nids.index(';') symbol_idx -= 1 # the symbol should be before the comment except: # the symbol is just the end symbol_idx = -1 # check for wirename, if it's an integer, then it's a node ref try: a = int(nids[symbol_idx]) except: try: name = str(nids[symbol_idx]) # use the exact name, unless it has already been used wire = Symbol(name, getnode(nids[0])) if wire in ts.vars: wire = FreshSymbol(getnode(nids[0]), template=name + "%d") invarlist.append(EqualsOrIff(wire, B2BV(nodemap[nid]))) ts.add_var(wire) except: pass if Logger.level(1): name = lambda x: str(nodemap[x]) if nodemap[x].is_symbol() else x uncovered = [name(x) for x in nodemap if x not in node_covered] uncovered.sort() if len(uncovered) > 0: Logger.warning("Unlinked nodes \"%s\"" % ",".join(uncovered)) if not self.symbolic_init: init = simplify(And(initlist)) else: init = TRUE() invar = simplify(And(invarlist)) # instead of trans, we're using the ftrans format -- see below ts.set_behavior(init, TRUE(), invar) # add ftrans for var, cond_assign_list in ftrans: ts.add_func_trans(var, cond_assign_list) hts.add_ts(ts) return (hts, invar_props, ltl_props)
def parse_model(self, \ relative_path, \ model_files, \ encoder_config, \ name=None, \ modifier=None, \ cache_files=False, \ clean_cache=False): hts = HTS(name if name is not None else "System") invar_props = [] ltl_props = [] models = model_files.split(FILE_SP) for strfile in models: (strfile, flags) = self.get_file_flags(strfile) filetype = strfile.split(".")[-1] strfile = strfile.replace("~", os.path.expanduser("~")) if strfile[0] != "/": strfile = relative_path + strfile parser = None for av_parser in ModelParsersFactory.get_parsers(): assert av_parser.name is not None if filetype in av_parser.get_extensions(): parser = av_parser if not self.parser: self.parser = av_parser if parser is not None: if not os.path.isfile(strfile): Logger.error("File \"%s\" does not exist" % strfile) if cache_files: md5 = self.md5(strfile) cf = "-".join(["1" if encoder_config.abstract_clock else "0", \ "1" if encoder_config.add_clock else "0", \ "1" if encoder_config.boolean else "0"]) cachefile = "%s-%s" % (md5, cf) cachedir = "%s/%s" % ("/".join( strfile.split("/")[:-1]), COSACACHEDIR) if cache_files and self._is_cached(cachedir, cachefile, clean_cache): Logger.msg( "Loading from cache file \"%s\"... " % (strfile), 0) (hts_a, inv_a, ltl_a, model_info) = self._from_cache(cachedir, cachefile, encoder_config, flags) else: Logger.msg("Parsing file \"%s\"... " % (strfile), 0) (hts_a, inv_a, ltl_a) = parser.parse_file(strfile, encoder_config, flags) model_info = parser.get_model_info() if modifier is not None: modifier(hts_a) if cache_files and not clean_cache: self._to_cache(cachedir, cachefile, hts_a, inv_a, ltl_a, model_info) self.model_info.combine(model_info) hts.combine(hts_a) invar_props += inv_a ltl_props += ltl_a Logger.log("DONE", 0) continue Logger.error( "Filetype \"%s\" unsupported or parser is not available" % filetype) if Logger.level(1): print(hts.print_statistics(name, Logger.level(2))) return (hts, invar_props, ltl_props)
def generate_STS(self, lines): ts = TS("Additional system") init = TRUE() trans = TRUE() invar = TRUE() states = {} assigns = set([]) varsmap = {} def def_var(name, vtype): if name in varsmap: return varsmap[name] var = Symbol(name, vtype) ts.add_state_var(var) return var for line in lines: if line.comment: continue if line.init: if T_I not in states: states[T_I] = TRUE() if line.init.varname != "": (value, typev) = self.__get_value(line.init.value) ivar = def_var(line.init.varname, typev) state = EqualsOrIff(ivar, value) else: state = TRUE() if line.init.value == T_TRUE else FALSE() states[T_I] = And(states[T_I], state) # Optimization for the initial state assignment init = And(init, state) state = TRUE() if line.state: sname = T_S + line.state.id if (line.state.varname != ""): (value, typev) = self.__get_value(line.state.value) ivar = def_var(line.state.varname, typev) state = EqualsOrIff(ivar, value) assval = (sname, line.state.varname) if assval not in assigns: assigns.add(assval) else: Logger.error( "Double assignment for variable \"%s\" at state \"%s\"" % (line.state.varname, sname)) else: state = TRUE() if line.state.value == T_TRUE else FALSE() if sname not in states: states[sname] = TRUE() states[sname] = And(states[sname], state) stateid_width = math.ceil(math.log(len(states)) / math.log(2)) stateid_var = Symbol(self.new_state_id(), BVType(stateid_width)) init = And(init, EqualsOrIff(stateid_var, BV(0, stateid_width))) invar = And( invar, Implies(EqualsOrIff(stateid_var, BV(0, stateid_width)), states[T_I])) states[T_I] = EqualsOrIff(stateid_var, BV(0, stateid_width)) count = 1 state_items = list(states.keys()) state_items.sort() for state in state_items: if state == T_I: continue invar = And( invar, Implies(EqualsOrIff(stateid_var, BV(count, stateid_width)), states[state])) states[state] = EqualsOrIff(stateid_var, BV(count, stateid_width)) count += 1 transdic = {} for line in lines: if line.comment: continue if line.trans: if states[line.trans.start] not in transdic: transdic[states[line.trans.start]] = [] transdic[states[line.trans.start]].append( states[line.trans.end]) for transition in transdic: (start, end) = (transition, transdic[transition]) trans = And(trans, Implies(start, TS.to_next(Or(end)))) vars_ = [v for v in get_free_variables(trans) if not TS.is_prime(v)] vars_ += get_free_variables(init) vars_ += get_free_variables(invar) invar = And(invar, BVULE(stateid_var, BV(count - 1, stateid_width))) ts.set_behavior(init, trans, invar) ts.add_state_var(stateid_var) hts = HTS("ETS") hts.add_ts(ts) invar_props = [] ltl_props = [] return (hts, invar_props, ltl_props)
def parse_string(self, strinput): lines = [] pstring = self.parser.parseString(strinput, parseAll=True) hts = HTS("STS") invar_props = [] ltl_props = [] modules = [] modulesdic = {} name = MAIN mainmodule = None for psts in pstring.stss: var_str = [] state_str = [] input_str = [] output_str = [] sub_str = [] par_str = [] init_str = [] trans_str = [] invar_str = [] if len(psts.moduledef) > 0: name = psts.moduledef[1] if len(psts.pardef) > 0: vardefs = psts.pardef for vardef in self._split_list(vardefs, T_CM): varname = vardef[0] vartype = vardef[2] varpar = vardef[4:-1] if vartype != T_BOOL else None par_str.append((varname, vartype, varpar)) if P_VARDEFS in dict(psts): vardefs = list(dict(psts.var)[P_VARDEFS]) for vardef in self._split_list(vardefs, T_SC): varname = vardef[0] if varname[0] == "'": varname = varname[1:-1] vartype = vardef[2] varpar = vardef[4:-1] if vartype != T_BOOL else None if vartype in (T_BV, T_BOOL): var_str.append((varname, vartype, varpar)) else: sub_str.append( (varname, vartype, self._split_list(varpar, T_CM))) if P_STATEDEFS in dict(psts): statedefs = list(dict(psts.state)[P_STATEDEFS]) for statedef in self._split_list(statedefs, T_SC): statename = statedef[0] if statename[0] == "'": statename = statename[1:-1] statetype = statedef[2] statepar = statedef[4:-1] if statetype != T_BOOL else None state_str.append((statename, statetype, statepar)) if P_INPUTDEFS in dict(psts): inputdefs = list(dict(psts.input)[P_INPUTDEFS]) for inputdef in self._split_list(inputdefs, T_SC): inputname = inputdef[0] if inputname[0] == "'": inputname = inputname[1:-1] inputtype = inputdef[2] inputpar = inputdef[4:-1] if inputtype != T_BOOL else None input_str.append((inputname, inputtype, inputpar)) if P_OUTPUTDEFS in dict(psts): outputdefs = list(dict(psts.output)[P_OUTPUTDEFS]) for outputdef in self._split_list(outputdefs, T_SC): outputname = outputdef[0] if outputname[0] == "'": outputname = outputname[1:-1] outputtype = outputdef[2] outputpar = outputdef[ 4:-1] if outputtype != T_BOOL else None output_str.append((outputname, outputtype, outputpar)) if P_INIT in dict(psts): inits = list(dict(psts.init)[P_FORMULAE]) for i in range(0, len(inits), 2): init_str.append(inits[i]) if P_TRANS in dict(psts): transs = list(dict(psts.trans)[P_FORMULAE]) for i in range(0, len(transs), 2): trans_str.append(transs[i]) if P_INVAR in dict(psts): invars = list(dict(psts.invar)[P_FORMULAE]) for i in range(0, len(invars), 2): invar_str.append(invars[i]) module = STSModule(name, var_str, state_str, input_str, output_str, par_str, init_str, invar_str, trans_str, sub_str) modules.append(module) if name == MAIN: mainmodule = module else: modulesdic[name] = module #hts.add_ts(self.generate_STS(var_str, init_str, invar_str, trans_str)) hts = self.generate_HTS(mainmodule, modulesdic) hts.flatten() return (hts, invar_props, ltl_props)
def run_verification(config): reset_env() Logger.verbosity = config.verbosity coreir_parser = None ets_parser = None sts_parser = None if config.ltl: ltl_reset_env() hts = HTS("Top level") if config.strfiles[0][-4:] != ".pkl": ps = ProblemSolver() (hts, invar_props, ltl_props) = ps.parse_model("./", config.strfiles, config.abstract_clock, config.symbolic_init, deterministic=config.deterministic, boolean=config.boolean, no_clock=config.no_clock) config.parser = ps.parser if config.pickle_file: Logger.msg("Pickling model to %s\n" % (config.pickle_file), 1) sys.setrecursionlimit(50000) with open(config.pickle_file, "wb") as f: pickle.dump(hts, f) else: if config.pickle_file: raise RuntimeError("Don't need to re-pickle the input file %s" % (config.strfile)) Logger.msg("Loading pickle file %s\n" % (config.strfile), 0) with open(config.pickle_file, "rb") as f: hts = pickle.load(f) Logger.log("DONE", 0) printsmv = True mc_config = MCConfig() sparser = StringParser() sparser.remap_or2an = config.parser.remap_or2an ltlparser = LTLParser() # if equivalence checking wait to add assumptions to combined system if config.assumptions is not None and config.equivalence is None: Logger.log("Adding %d assumptions... " % len(config.assumptions), 1) assumps = [t[1] for t in sparser.parse_formulae(config.assumptions)] hts.assumptions = assumps lemmas = None if config.lemmas is not None: Logger.log("Adding %d lemmas... " % len(config.lemmas), 1) parsed_formulae = sparser.parse_formulae(config.lemmas) if list(set([t[2] for t in parsed_formulae]))[0][0] != False: Logger.error("Lemmas do not support \"next\" operators") lemmas = [t[1] for t in parsed_formulae] hts.lemmas = lemmas mc_config.smt2file = config.smt2file mc_config.full_trace = config.full_trace mc_config.trace_vars_change = config.trace_vars_change mc_config.trace_all_vars = config.trace_all_vars mc_config.prefix = config.prefix mc_config.strategy = config.strategy mc_config.skip_solving = config.skip_solving mc_config.map_function = config.parser.remap_an2or mc_config.solver_name = config.solver_name mc_config.vcd_trace = config.vcd mc_config.prove = config.prove mc_config.incremental = config.incremental if config.ltl: bmc_ltl = BMCLTL(hts, mc_config) else: bmc_safety = BMCSafety(hts, mc_config) if config.translate: Logger.log("Writing system to \"%s\"" % (config.translate), 0) printer = PrintersFactory.printer_by_name(config.printer) props = [] if config.ltl: props += ltlparser.parse_formulae(config.properties) props += [(str(p), p, None) for p in ltl_props] else: props += sparser.parse_formulae(config.properties) props += [(str(p), p, None) for p in invar_props] with open(config.translate, "w") as f: f.write(printer.print_hts(hts, props)) if config.simulate: count = 0 if config.properties is None: props = [("True", TRUE(), None)] else: props = sparser.parse_formulae(config.properties) for (strprop, prop, _) in props: Logger.log("Simulation for property \"%s\":" % (strprop), 0) res, trace = bmc_safety.simulate(prop, config.bmc_length) if res == VerificationStatus.TRUE: count += 1 print_trace("Execution", trace, count, config.prefix) else: Logger.log("No execution found", 0) if config.safety: count = 0 props = sparser.parse_formulae(config.properties) props += [(str(p), p, None) for p in invar_props] if len(props) == 0: Logger.warning("Safety verification requires at least a property") for (strprop, prop, _) in props: Logger.log("Safety verification for property \"%s\":" % (strprop), 0) res, trace, t = bmc_safety.safety(prop, config.bmc_length, 
config.bmc_length_min) Logger.log("\nProperty is %s" % res, 0) if res == VerificationStatus.FALSE: count += 1 print_trace("Counterexample", trace, count, config.prefix) return 0 if config.equivalence or config.fsm_check: if config.equivalence: parser2 = CoreIRParser(config.abstract_clock, config.symbolic_init, config.run_passes) Logger.msg("Parsing file \"%s\"... " % (config.equivalence), 0) hts2 = parser2.parse_file(config.equivalence) Logger.log("DONE", 0) symb = " (symbolic init)" if config.symbolic_init else "" Logger.log( "Equivalence checking%s with k=%s:" % (symb, config.bmc_length), 0) if Logger.level(1): print(hts2.print_statistics("System 2", Logger.level(2))) else: hts2 = hts # TODO: Make incremental solving optional htseq, miter_out = Miter.combine_systems(hts, hts2, config.bmc_length, config.symbolic_init, config.properties, True) if config.assumptions is not None: Logger.log( "Adding %d assumptions to combined system... " % len(config.assumptions), 1) assumps = [ t[1] for t in sparser.parse_formulae(config.assumptions) ] htseq.assumptions = assumps # create bmc object for combined system bmcseq = BMC(htseq, mc_config) res, trace, t = bmcseq.safety(miter_out, config.bmc_length, config.bmc_length_min) msg = "Systems are %s equivalent" if config.equivalence else "System is%s deterministic" if res == VerificationStatus.FALSE: Logger.log(msg % (" not"), 0) print_trace("Counterexample", trace, 1, config.prefix) elif res == VerificationStatus.UNK: if config.symbolic_init: # strong equivalence with symbolic initial state Logger.log(msg % (""), 0) else: Logger.log(msg % ("") + " up to k=%i" % t, 0) else: Logger.log(msg % ("") + " up to k=%i" % t, 0) if config.ltl: count = 0 props = ltlparser.parse_formulae(config.properties) props += [(str(p), p, None) for p in ltl_props] if len(props) == 0: Logger.warning("LTL verification requires at least a property") for (strprop, prop, _) in props: Logger.log("LTL verification for property \"%s\":" % (strprop), 0) res, trace, t = bmc_ltl.ltl(prop, config.bmc_length, config.bmc_length_min) Logger.log("\nProperty is %s" % res, 0) if res == VerificationStatus.FALSE: count += 1 print_trace("Counterexample", trace, count, config.prefix) return 0
def generate_HTS(self, module, modulesdic): hts = HTS(module.name) ts = TS("TS %s" % module.name) init = [] trans = [] invar = [] params = [] sparser = StringParser() (vars, states, inputs, outputs) = self._collect_sub_variables(module, modulesdic, path=[], varlist=[], statelist=[], inputlist=[], outputlist=[]) for var in vars: ts.add_var(self._define_var(var, module.name)) for var in states: ts.add_state_var(self._define_var(var, module.name)) for var in inputs: ts.add_input_var(self._define_var(var, module.name)) for var in outputs: ts.add_output_var(self._define_var(var, module.name)) self._check_parameters(module, modulesdic, ts.vars) for par in module.pars: assert len(par) == 2, "Expecting a variable" hts.add_param(self._define_var((par[0], par[1]), module.name)) for init_s in module.init: formula = sparser.parse_formula(quote_names(init_s, module.name), False) init.append(formula) for invar_s in module.invar: formula = sparser.parse_formula(quote_names(invar_s, module.name), False) invar.append(formula) for trans_s in module.trans: formula = sparser.parse_formula(quote_names(trans_s, module.name), False) trans.append(formula) for sub in module.subs: hts.add_sub(sub[0], self.generate_HTS(modulesdic[sub[1]], modulesdic), tuple([v[0] for v in sub[2]])) ts.init = And(init) ts.invar = And(invar) ts.trans = And(trans) hts.add_ts(ts) return hts
def parse_string(self, strinput): hts = HTS() ts = TS() nodemap = {} node_covered = set([]) translist = [] initlist = [] invarlist = [] invar_props = [] ltl_props = [] def getnode(nid): node_covered.add(nid) if int(nid) < 0: return Ite(BV2B(nodemap[str(-int(nid))]), BV(0,1), BV(1,1)) return nodemap[nid] def binary_op(bvop, bop, left, right): if (get_type(left) == BOOL) and (get_type(right) == BOOL): return bop(left, right) return bvop(B2BV(left), B2BV(right)) def unary_op(bvop, bop, left): if (get_type(left) == BOOL): return bop(left) return bvop(left) for line in strinput.split(NL): linetok = line.split() if len(linetok) == 0: continue if linetok[0] == COM: continue (nid, ntype, *nids) = linetok if ntype == SORT: (stype, *attr) = nids if stype == BITVEC: nodemap[nid] = BVType(int(attr[0])) node_covered.add(nid) if stype == ARRAY: nodemap[nid] = ArrayType(getnode(attr[0]), getnode(attr[1])) node_covered.add(nid) if ntype == WRITE: nodemap[nid] = Store(*[getnode(n) for n in nids[1:4]]) if ntype == READ: nodemap[nid] = Select(getnode(nids[1]), getnode(nids[2])) if ntype == ZERO: nodemap[nid] = BV(0, getnode(nids[0]).width) if ntype == ONE: nodemap[nid] = BV(1, getnode(nids[0]).width) if ntype == ONES: width = getnode(nids[0]).width nodemap[nid] = BV((2**width)-1, width) if ntype == REDOR: width = get_type(getnode(nids[1])).width zeros = BV(0, width) nodemap[nid] = BVNot(BVComp(getnode(nids[1]), zeros)) if ntype == REDAND: width = get_type(getnode(nids[1])).width ones = BV((2**width)-1, width) nodemap[nid] = BVComp(getnode(nids[1]), ones) if ntype == CONSTD: width = getnode(nids[0]).width nodemap[nid] = BV(int(nids[1]), width) if ntype == CONST: width = getnode(nids[0]).width nodemap[nid] = BV(bin_to_dec(nids[1]), width) if ntype == STATE: if len(nids) > 1: nodemap[nid] = Symbol(nids[1], getnode(nids[0])) else: nodemap[nid] = Symbol((SN%nid), getnode(nids[0])) ts.add_state_var(nodemap[nid]) if ntype == INPUT: if len(nids) > 1: nodemap[nid] = Symbol(nids[1], getnode(nids[0])) else: nodemap[nid] = Symbol((SN%nid), getnode(nids[0])) ts.add_input_var(nodemap[nid]) if ntype == OUTPUT: if len(nids) > 2: symbol = Symbol(nids[2], getnode(nids[0])) else: symbol = Symbol((SN%nid), getnode(nids[0])) nodemap[nid] = EqualsOrIff(symbol, B2BV(getnode(nids[1]))) invarlist.append(nodemap[nid]) node_covered.add(nid) ts.add_output_var(symbol) if ntype == AND: nodemap[nid] = binary_op(BVAnd, And, getnode(nids[1]), getnode(nids[2])) if ntype == CONCAT: nodemap[nid] = BVConcat(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == XOR: nodemap[nid] = binary_op(BVXor, Xor, getnode(nids[1]), getnode(nids[2])) if ntype == NAND: bvop = lambda x,y: BVNot(BVAnd(x, y)) bop = lambda x,y: Not(And(x, y)) nodemap[nid] = binary_op(bvop, bop, getnode(nids[1]), getnode(nids[2])) if ntype == IMPLIES: nodemap[nid] = BVOr(BVNot(getnode(nids[1])), getnode(nids[2])) if ntype == NOT: nodemap[nid] = unary_op(BVNot, Not, getnode(nids[1])) if ntype == UEXT: nodemap[nid] = BVZExt(B2BV(getnode(nids[1])), int(nids[2])) if ntype == OR: nodemap[nid] = binary_op(BVOr, Or, getnode(nids[1]), getnode(nids[2])) if ntype == ADD: nodemap[nid] = BVAdd(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == SUB: nodemap[nid] = BVSub(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == UGT: nodemap[nid] = BVUGT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == UGTE: nodemap[nid] = BVUGE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == ULT: nodemap[nid] = BVULT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if 
ntype == ULTE: nodemap[nid] = BVULE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == EQ: nodemap[nid] = BVComp(getnode(nids[1]), getnode(nids[2])) if ntype == NE: nodemap[nid] = BVNot(BVComp(getnode(nids[1]), getnode(nids[2]))) if ntype == MUL: nodemap[nid] = BVMul(B2BV(getnode(nids[1])), B2BV(getnode(nids[2]))) if ntype == SLICE: nodemap[nid] = BVExtract(B2BV(getnode(nids[1])), int(nids[3]), int(nids[2])) if ntype == SLL: nodemap[nid] = BVLShl(getnode(nids[1]), getnode(nids[2])) if ntype == SRA: nodemap[nid] = BVAShr(getnode(nids[1]), getnode(nids[2])) if ntype == SRL: nodemap[nid] = BVLShr(getnode(nids[1]), getnode(nids[2])) if ntype == ITE: if (get_type(getnode(nids[2])) == BOOL) or (get_type(getnode(nids[3])) == BOOL): nodemap[nid] = Ite(BV2B(getnode(nids[1])), BV2B(getnode(nids[2])), BV2B(getnode(nids[3]))) else: nodemap[nid] = Ite(BV2B(getnode(nids[1])), getnode(nids[2]), getnode(nids[3])) if ntype == NEXT: if (get_type(getnode(nids[1])) == BOOL) or (get_type(getnode(nids[2])) == BOOL): nodemap[nid] = EqualsOrIff(BV2B(TS.get_prime(getnode(nids[1]))), BV2B(getnode(nids[2]))) else: nodemap[nid] = EqualsOrIff(TS.get_prime(getnode(nids[1])), getnode(nids[2])) translist.append(getnode(nid)) if ntype == INIT: if (get_type(getnode(nids[1])) == BOOL) or (get_type(getnode(nids[2])) == BOOL): nodemap[nid] = EqualsOrIff(BV2B(getnode(nids[1])), BV2B(getnode(nids[2]))) else: nodemap[nid] = EqualsOrIff(getnode(nids[1]), getnode(nids[2])) initlist.append(getnode(nid)) if ntype == CONSTRAINT: nodemap[nid] = BV2B(getnode(nids[0])) invarlist.append(getnode(nid)) if ntype == BAD: nodemap[nid] = getnode(nids[0]) invar_props.append(Not(BV2B(getnode(nid)))) if nid not in nodemap: Logger.error("Unknown node type \"%s\""%ntype) if Logger.level(1): name = lambda x: str(nodemap[x]) if nodemap[x].is_symbol() else x uncovered = [name(x) for x in nodemap if x not in node_covered] uncovered.sort() if len(uncovered) > 0: Logger.warning("Unlinked nodes \"%s\""%",".join(uncovered)) if not self.symbolic_init: init = simplify(And(initlist)) else: init = TRUE() trans = simplify(And(translist)) invar = simplify(And(invarlist)) ts.set_behavior(init, trans, invar) hts.add_ts(ts) return (hts, invar_props, ltl_props)
def parse_file(self, strfile, config, flags=None): self.config = config self.__reset_structures() Logger.msg("Reading CoreIR system... ", 1) top_module = self.context.load_from_file(strfile) if config.run_passes: self.run_passes() Modules.abstract_clock = self.config.abstract_clock Modules.symbolic_init = self.config.symbolic_init top_def = top_module.definition interface = list(top_module.type.items()) modules = {} sym_map = {} not_defined_mods = [] hts = HTS(top_module.name) invar_props = [] ltl_props = [] Logger.msg("Starting encoding... ", 1) count = 0 def extract_value(x, modname, inst_intr, inst_conf, inst_mod): if x in inst_intr: return self.BVVar(modname + x, inst_intr[x].size) if x in inst_conf: xval = inst_conf[x].value if type(xval) == bool: xval = 1 if xval else 0 else: if type(xval) != int: try: if xval.is_x(): xval = None else: xval = xval.as_uint() except: try: xval = xval.val except: xval = xval.unsigned_value return xval if inst_mod.generated: inst_args = inst_mod.generator_args if x in inst_args: return inst_args[x].value return None if Logger.level(1): timer = Logger.start_timer("IntConvertion", False) en_tprinting = False if Logger.level(2): ttimer = Logger.start_timer("Convertion", False) if self.config.deterministic: td_instances = top_def.instances top_def_instances = [(inst.selectpath, inst.config, inst.module) for inst in td_instances] top_def_instances.sort() else: top_def_instances = list(top_def.instances) totalinst = len(top_def_instances) for inst in top_def_instances: if Logger.level(1): count += 1 if count % 300 == 0: dtime = Logger.get_timer(timer, False) if dtime > 2: en_tprinting = True if en_tprinting: Logger.inline( "%s" % status_bar( (float(count) / float(totalinst))), 1) timer = Logger.start_timer("IntConvertion", False) if Logger.level(2): Logger.get_timer(timer, False) ts = None if self.config.deterministic: (inst_name, inst_conf, inst_mod) = inst else: inst_name = inst.selectpath inst_conf = inst.config inst_mod = inst.module inst_type = inst_mod.name inst_intr = dict(inst_mod.type.items()) modname = (SEP.join(inst_name)) + SEP values_dic = {} for x in self.attrnames: values_dic[x] = extract_value(x, modname, inst_intr, inst_conf, inst_mod) def args(ports_list): return [values_dic[x] for x in ports_list] sym = self.__mod_to_sym(inst_type, args) if sym is not None: sym_map[sym[0].symbol_name()] = (sym[0], sym[1]) continue ts = self.__mod_to_impl(inst_type, args) if ts is not None: if flags is not None: if CoreIRModelFlags.NO_INIT in flags: ts.init = TRUE() if CoreIRModelFlags.FC_LEMMAS in flags: for v in ts.vars: v_name = v.symbol_name() if (CR in v_name) or (RCR in v_name): cons_v_name = v_name[:len( CR)] if CR in v_name else v_name[:len(RCR)] cons_v = Symbol(cons_v_name, v.symbol_type()) lemma = EqualsOrIff( cons_v, BV(values_dic[self.VALUE], cons_v.symbol_type().width)) hts.add_lemma(lemma) for v in ts.state_vars: lemma = EqualsOrIff( v, BV(values_dic[self.INIT], v.symbol_type().width)) hts.add_lemma(lemma) hts.add_ts(ts) else: if inst_type not in not_defined_mods: intface = ", ".join([ "%s" % (v) for v in values_dic if values_dic[v] is not None ]) Logger.error( "Module type \"%s\" with interface \"%s\" is not defined" % (inst_type, intface)) not_defined_mods.append(inst_type) Logger.clear_inline(1) if self.config.deterministic: interface.sort() for var in interface: varname = SELF + SEP + var[0] bvvar = self.BVVar(varname, var[1].size) if (var[1].is_input()): hts.add_input_var(bvvar) else: hts.add_output_var(bvvar) # Adding clock behavior if 
(self.CLK in var[0].lower()) and (var[1].is_input()): self.clock_list.add(bvvar) if self.config.abstract_clock: self.abstract_clock_list.add( (bvvar, (BV(0, var[1].size), BV(1, var[1].size)))) varmap = dict([(s.symbol_name(), s) for s in hts.vars]) def split_paths(path): ret = [] for el in path: ret += el.split(CSEP) return ret def dict_select(dic, el): return dic[el] if el in dic else None eq_conns = [] eq_vars = set([]) if self.config.deterministic: td_connections = top_def.connections top_def_connections = [ ((conn.first.selectpath, conn.second.selectpath) if conn.first.selectpath < conn.second.selectpath else (conn.second.selectpath, conn.first.selectpath), conn) for conn in td_connections ] top_def_connections.sort() else: top_def_connections = list(top_def.connections) for conn in top_def_connections: if self.config.deterministic: first_selectpath = split_paths(conn[0][0]) second_selectpath = split_paths(conn[0][1]) else: first_selectpath = split_paths(conn.first.selectpath) second_selectpath = split_paths(conn.second.selectpath) first = SEP.join(first_selectpath) second = SEP.join(second_selectpath) firstvar = None secondvar = None if is_number(first_selectpath[-1]): firstname = SEP.join(first_selectpath[:-1]) else: firstname = SEP.join(first_selectpath) if is_number(second_selectpath[-1]): secondname = SEP.join(second_selectpath[:-1]) else: secondname = SEP.join(second_selectpath) first = (dict_select(varmap, self.remap_or2an(firstname)), None) second = (dict_select(varmap, self.remap_or2an(secondname)), None) firstvar = first[0] secondvar = second[0] if (firstvar is None) and (self.remap_or2an(firstname) in sym_map): firstvar = sym_map[self.remap_or2an(firstname)][1] if (secondvar is None) and (self.remap_or2an(secondname) in sym_map): secondvar = sym_map[self.remap_or2an(secondname)][1] if (firstvar is None) and (secondvar is not None): Logger.error("Symbol \"%s\" is not defined" % firstname) first = (Symbol(self.remap_or2an(firstname), secondvar.symbol_type()), None) else: if firstvar.is_constant(): sel = int(first_selectpath[-1]) if (is_number( first_selectpath[-1])) else None first = (firstvar, sel) else: if (is_number(first_selectpath[-1])) and ( firstvar.symbol_type() != BOOL) and (firstvar.symbol_type().width > 1): sel = int(first_selectpath[-1]) first = (firstvar, sel) if (firstvar is not None) and (secondvar is None): Logger.error("Symbol \"%s\" is not defined" % secondname) second = (Symbol(self.remap_or2an(secondname), firstvar.symbol_type()), None) else: if secondvar.is_constant(): sel = int(second_selectpath[-1]) if (is_number( second_selectpath[-1])) else None second = (secondvar, sel) else: if (is_number(second_selectpath[-1])) and ( secondvar.symbol_type() != BOOL) and (secondvar.symbol_type().width > 1): sel = int(second_selectpath[-1]) second = (secondvar, sel) assert ((firstvar is not None) and (secondvar is not None)) eq_conns.append((first, second)) if firstvar.is_symbol(): eq_vars.add(firstvar) if secondvar.is_symbol(): eq_vars.add(secondvar) conns_len = len(eq_conns) if self.pack_connections: eq_conns = self.__pack_connections(eq_conns) if len(eq_conns) < conns_len: Logger.log("Packed %d connections" % (conns_len - len(eq_conns)), 1) eq_formula = TRUE() for eq_conn in eq_conns: (fst, snd) = eq_conn if fst[1] is None: first = fst[0] else: if len(fst) > 2: first = BVExtract(fst[0], fst[1], fst[2]) else: first = BVExtract(fst[0], fst[1], fst[1]) if snd[1] is None: second = snd[0] else: if len(snd) > 2: second = BVExtract(snd[0], snd[1], snd[2]) else: second = 
BVExtract(snd[0], snd[1], snd[1]) if (first.get_type() != BOOL) and (second.get_type() == BOOL): second = Ite(second, BV(1, 1), BV(0, 1)) if (first.get_type() == BOOL) and (second.get_type() != BOOL): first = Ite(first, BV(1, 1), BV(0, 1)) eq_formula = And(eq_formula, EqualsOrIff(first, second)) Logger.log(str(EqualsOrIff(first, second)), 3) ts = TS("Connections") ts.invar = eq_formula ts.vars = eq_vars hts.add_ts(ts) if self.enc_map is not None: del (self.enc_map) if Logger.level(2): Logger.get_timer(ttimer) return (hts, invar_props, ltl_props)
def parse_file(self, file_path, config, flags=None): # coreir needs a string representing the path strfile = str(file_path) self.config = config self.__reset_structures() Logger.msg("Reading CoreIR system... ", 1) top_module = self.context.load_from_file(strfile) if config.run_coreir_passes: self.run_passes() Modules.abstract_clock = self.config.abstract_clock Modules.symbolic_init = self.config.symbolic_init top_def = top_module.definition interface = list(top_module.type.items()) modules = {} sym_map = {} not_defined_mods = [] hts = HTS(top_module.name) invar_props = [] ltl_props = [] Logger.msg("Starting encoding... ", 1) count = 0 def extract_value(x, modname, inst_intr, inst_conf, inst_mod): if x in inst_intr: return self.BVVar(modname + x, inst_intr[x].size) if x in inst_conf: xval = inst_conf[x].value if type(xval) == bool: xval = 1 if xval else 0 else: if type(xval) != int: try: xval = xval.as_uint() except: xval = None return xval if inst_mod.generated: inst_args = inst_mod.generator_args if x in inst_args: return inst_args[x].value return None if Logger.level(1): timer = Logger.start_timer("IntConvertion", False) en_tprinting = False if Logger.level(2): ttimer = Logger.start_timer("Convertion", False) td_instances = top_def.instances top_def_instances = [(inst.selectpath, inst.config, inst.module) for inst in td_instances] # sorting keeps the behavior deterministic top_def_instances.sort() totalinst = len(top_def_instances) for inst in top_def_instances: if Logger.level(1): count += 1 if count % 300 == 0: dtime = Logger.get_timer(timer, False) if dtime > 2: en_tprinting = True if en_tprinting: Logger.inline( "%s" % status_bar( (float(count) / float(totalinst))), 1) timer = Logger.start_timer("IntConvertion", False) if Logger.level(2): Logger.get_timer(timer, False) ts = None (inst_name, inst_conf, inst_mod) = inst inst_type = inst_mod.name inst_intr = dict(inst_mod.type.items()) modname = (SEP.join(inst_name)) + SEP values_dic = {} for x in self.attrnames: values_dic[x] = extract_value(x, modname, inst_intr, inst_conf, inst_mod) def args(ports_list): return [values_dic[x] for x in ports_list] sym = self.__mod_to_sym(inst_type, args) if sym is not None: sym_map[sym[0].symbol_name()] = (sym[0], sym[1]) continue ts = self.__mod_to_impl(inst_type, args) if ts is not None: if flags is not None: if CoreIRModelFlags.NO_INIT in flags: ts.init = TRUE() if CoreIRModelFlags.FC_LEMMAS in flags: for v in ts.vars: v_name = v.symbol_name() if (CR in v_name) or (RCR in v_name): cons_v_name = v_name[:len( CR)] if CR in v_name else v_name[:len(RCR)] cons_v = Symbol(cons_v_name, v.symbol_type()) lemma = EqualsOrIff( cons_v, BV(values_dic[self.VALUE], cons_v.symbol_type().width)) hts.add_lemma(lemma) for v in ts.state_vars: lemma = EqualsOrIff( v, BV(values_dic[self.INIT], v.symbol_type().width)) hts.add_lemma(lemma) hts.add_ts(ts) else: if inst_type not in not_defined_mods: intface = ", ".join([ "%s" % (v) for v in values_dic if values_dic[v] is not None ]) Logger.error( "Module type \"%s\" with interface \"%s\" is not defined" % (inst_type, intface)) not_defined_mods.append(inst_type) Logger.clear_inline(1) # sorting keeps the behavior deterministic interface.sort() for var in interface: varname = SELF + SEP + var[0] bvvar = self.BVVar(varname, var[1].size) if (var[1].is_input()): hts.add_input_var(bvvar) else: hts.add_output_var(bvvar) if var[1].kind == NAMED and var[1].name == COREIR_CLK: self.clock_list.add(bvvar) if self.config.abstract_clock: self.abstract_clock_list.add( (bvvar, (BV(0, 
var[1].size), BV(1, var[1].size)))) else: # add state variable that stores the previous clock value # This is IMPORTANT for model checking soundness, but # it isn't obvious that this is necessary # # imagine we have an explicit clock encoding (not abstract_clock), e.g. # next(state_var) = (!clk & next(clk)) ? <state_update> : <old value> # and if we're trying to prove something using k-induction, there's a "loop free" # constraint that the state and output variables don't repeat (reach the same # state twice) in the trace # but on a negedge clock, there can be scenarios where no state or outputs # can be updated and we'll get a trivial unsat which will be interpreted as # a converged proof -- uh oh # # adding this state element just ensures that the loop free constraint won't # be violated trivially # e.g. on a neg-edge clock, this new state element will have changed # make it hidden (won't be printed) # HIDDEN_VAR is a prefix that printers check for trailing_clock_var = self.BVVar( "{}{}__prev".format(HIDDEN_VAR, varname), var[1].size) ts = TS() ts.add_state_var(trailing_clock_var) # the initial state for this trailing variable is unconstrained ts.set_behavior( TRUE(), EqualsOrIff(TS.get_prime(trailing_clock_var), bvvar), TRUE()) hts.add_ts(ts) varmap = dict([(s.symbol_name(), s) for s in hts.vars]) def split_paths(path): ret = [] for el in path: ret += el.split(CSEP) return ret def dict_select(dic, el): return dic[el] if el in dic else None eq_conns = [] eq_vars = set([]) td_connections = top_def.connections top_def_connections = [ ((conn.first.selectpath, conn.second.selectpath) if conn.first.selectpath < conn.second.selectpath else (conn.second.selectpath, conn.first.selectpath), conn) for conn in td_connections ] # sorting keeps the behavior deterministic top_def_connections.sort() for conn in top_def_connections: first_selectpath = split_paths(conn[0][0]) second_selectpath = split_paths(conn[0][1]) first = SEP.join(first_selectpath) second = SEP.join(second_selectpath) firstvar = None secondvar = None if is_number(first_selectpath[-1]): firstname = SEP.join(first_selectpath[:-1]) else: firstname = SEP.join(first_selectpath) if is_number(second_selectpath[-1]): secondname = SEP.join(second_selectpath[:-1]) else: secondname = SEP.join(second_selectpath) first = (dict_select(varmap, self.remap_or2an(firstname)), None) second = (dict_select(varmap, self.remap_or2an(secondname)), None) firstvar = first[0] secondvar = second[0] if (firstvar is None) and (self.remap_or2an(firstname) in sym_map): firstvar = sym_map[self.remap_or2an(firstname)][1] if (secondvar is None) and (self.remap_or2an(secondname) in sym_map): secondvar = sym_map[self.remap_or2an(secondname)][1] if (firstvar is None) and (secondvar is not None): Logger.error("Symbol \"%s\" is not defined" % firstname) first = (Symbol(self.remap_or2an(firstname), secondvar.symbol_type()), None) else: if firstvar.is_constant(): sel = int(first_selectpath[-1]) if (is_number( first_selectpath[-1])) else None first = (firstvar, sel) else: if (is_number(first_selectpath[-1])) and ( firstvar.symbol_type() != BOOL) and (firstvar.symbol_type().width > 1): sel = int(first_selectpath[-1]) first = (firstvar, sel) if (firstvar is not None) and (secondvar is None): Logger.error("Symbol \"%s\" is not defined" % secondname) second = (Symbol(self.remap_or2an(secondname), firstvar.symbol_type()), None) else: if secondvar.is_constant(): sel = int(second_selectpath[-1]) if (is_number( second_selectpath[-1])) else None second = (secondvar, sel) else: if 
(is_number(second_selectpath[-1])) and ( secondvar.symbol_type() != BOOL) and (secondvar.symbol_type().width > 1): sel = int(second_selectpath[-1]) second = (secondvar, sel) assert ((firstvar is not None) and (secondvar is not None)) eq_conns.append((first, second)) if firstvar.is_symbol(): eq_vars.add(firstvar) if secondvar.is_symbol(): eq_vars.add(secondvar) conns_len = len(eq_conns) if self.pack_connections: eq_conns = self.__pack_connections(eq_conns) if len(eq_conns) < conns_len: Logger.log("Packed %d connections" % (conns_len - len(eq_conns)), 1) eq_formula = TRUE() for eq_conn in eq_conns: (fst, snd) = eq_conn if fst[1] is None: first = fst[0] else: if len(fst) > 2: first = BVExtract(fst[0], fst[1], fst[2]) else: first = BVExtract(fst[0], fst[1], fst[1]) if snd[1] is None: second = snd[0] else: if len(snd) > 2: second = BVExtract(snd[0], snd[1], snd[2]) else: second = BVExtract(snd[0], snd[1], snd[1]) if (first.get_type() != BOOL) and (second.get_type() == BOOL): second = Ite(second, BV(1, 1), BV(0, 1)) if (first.get_type() == BOOL) and (second.get_type() != BOOL): first = Ite(first, BV(1, 1), BV(0, 1)) eq_formula = And(eq_formula, EqualsOrIff(first, second)) Logger.log(str(EqualsOrIff(first, second)), 3) ts = TS("Connections") ts.invar = eq_formula ts.vars = eq_vars hts.add_ts(ts) if self.enc_map is not None: del (self.enc_map) if Logger.level(2): Logger.get_timer(ttimer) # check that clocks were detected if there's any state if hts.state_vars: assert self.clock_list, "Expecting clocks if there are state variables" return (hts, invar_props, ltl_props)
def parse_model(self, \ relative_path, \ model_files, \ abstract_clock, \ symbolic_init, \ name=None, \ deterministic=False, \ boolean=False, \ no_clock=False, \ run_passes=True): hts = HTS("System 1") invar_props = [] ltl_props = [] models = model_files.split(MODEL_SP) for strfile in models: (strfile, flags) = self.get_file_flags(strfile) filetype = strfile.split(".")[-1] strfile = strfile.replace("~", os.path.expanduser("~")) if strfile[0] != "/": strfile = relative_path + strfile parser = None if filetype in CoreIRParser.get_extensions(): parser = CoreIRParser(abstract_clock, symbolic_init, no_clock, run_passes) parser.boolean = boolean parser.deterministic = deterministic self.parser = parser if filetype in ExplicitTSParser.get_extensions(): parser = ExplicitTSParser() if not self.parser: self.parser = parser if filetype in SymbolicTSParser.get_extensions(): parser = SymbolicTSParser() if not self.parser: self.parser = parser if filetype in SymbolicSimpleTSParser.get_extensions(): parser = SymbolicSimpleTSParser() if not self.parser: self.parser = parser if filetype in BTOR2Parser.get_extensions(): parser = BTOR2Parser() if not self.parser: self.parser = parser if parser is not None: if not os.path.isfile(strfile): Logger.error("File \"%s\" does not exist" % strfile) Logger.msg("Parsing file \"%s\"... " % (strfile), 0) (hts_a, inv_a, ltl_a) = parser.parse_file(strfile, flags) hts.combine(hts_a) invar_props += inv_a ltl_props += ltl_a Logger.log("DONE", 0) continue Logger.error("Filetype \"%s\" unsupported" % filetype) if Logger.level(1): print(hts.print_statistics(name, Logger.level(2))) return (hts, invar_props, ltl_props)
def parse_string(self, lines): [none, var, state, input, output, init, invar, trans] = range(8) section = none inits = TRUE() invars = TRUE() transs = TRUE() sparser = StringParser() count = 0 vars = set([]) states = set([]) inputs = set([]) outputs = set([]) invar_props = [] ltl_props = [] for line in lines: count += 1 if line.strip() in ["", "\n"]: continue if T_VAR == line[:len(T_VAR)]: section = var continue if T_STATE == line[:len(T_STATE)]: section = state continue if T_INPUT == line[:len(T_INPUT)]: section = input continue if T_OUTPUT == line[:len(T_OUTPUT)]: section = output continue if T_INIT == line[:len(T_INIT)]: section = init continue if T_INVAR == line[:len(T_INVAR)]: section = invar continue if T_TRANS == line[:len(T_TRANS)]: section = trans continue if section in [var, state, input, output]: line = line[:-2].replace(" ", "").split(":") varname, vartype = line[0], (line[1][:-1].split("(")) if varname[0] == "'": varname = varname[1:-1] vardef = self._define_var(varname, vartype) vars.add(vardef) if section == state: states.add(vardef) if section == input: inputs.add(vardef) if section == output: outputs.add(vardef) if section in [init, invar, trans]: qline = quote_names(line[:-2], replace_ops=False) if section == init: inits = And(inits, sparser.parse_formula(qline)) if section == invar: invars = And(invars, sparser.parse_formula(qline)) if section == trans: transs = And(transs, sparser.parse_formula(qline)) hts = HTS("STS") ts = TS() ts.vars = vars ts.state_vars = states ts.input_vars = inputs ts.output_vars = outputs ts.init = inits ts.invar = invars ts.trans = transs hts.add_ts(ts) return (hts, invar_props, ltl_props)
def parse_string(self, strinput):

    hts = HTS()
    ts = TS()

    nodemap = {}
    node_covered = set([])

    # list of (var, cond_assign_list) tuples, where cond_assign_list is a
    # list of (condition, value) pairs and everything is a pysmt FNode;
    # for btor, the condition is always True
    ftrans = []

    initlist = []
    invarlist = []

    invar_props = []
    ltl_props = []

    prop_count = 0

    # clean string input, remove special characters from names
    for sc, rep in special_char_replacements.items():
        strinput = strinput.replace(sc, rep)

    def getnode(nid):
        node_covered.add(nid)
        if int(nid) < 0:
            return Ite(BV2B(nodemap[str(-int(nid))]), BV(0, 1), BV(1, 1))
        return nodemap[nid]

    def binary_op(bvop, bop, left, right):
        if (get_type(left) == BOOL) and (get_type(right) == BOOL):
            return bop(left, right)
        return bvop(B2BV(left), B2BV(right))

    def unary_op(bvop, bop, left):
        if get_type(left) == BOOL:
            return bop(left)
        return bvop(left)

    for line in strinput.split(NL):
        linetok = line.split()
        if len(linetok) == 0:
            continue
        if linetok[0] == COM:
            continue

        (nid, ntype, *nids) = linetok

        if ntype == SORT:
            (stype, *attr) = nids
            if stype == BITVEC:
                nodemap[nid] = BVType(int(attr[0]))
                node_covered.add(nid)
            if stype == ARRAY:
                nodemap[nid] = ArrayType(getnode(attr[0]), getnode(attr[1]))
                node_covered.add(nid)

        if ntype == WRITE:
            nodemap[nid] = Store(*[getnode(n) for n in nids[1:4]])

        if ntype == READ:
            nodemap[nid] = Select(getnode(nids[1]), getnode(nids[2]))

        if ntype == ZERO:
            nodemap[nid] = BV(0, getnode(nids[0]).width)

        if ntype == ONE:
            nodemap[nid] = BV(1, getnode(nids[0]).width)

        if ntype == ONES:
            width = getnode(nids[0]).width
            nodemap[nid] = BV((2**width) - 1, width)

        if ntype == REDOR:
            width = get_type(getnode(nids[1])).width
            zeros = BV(0, width)
            nodemap[nid] = BVNot(BVComp(getnode(nids[1]), zeros))

        if ntype == REDXOR:
            width = get_type(getnode(nids[1])).width
            nodemap[nid] = BV(0, width)
            zeros = BV(0, width)
            for yx_i in range(width):
                tmp = BV(1 << yx_i, width)
                tmp_2 = BVAnd(tmp, B2BV(getnode(nids[1])))
                tmp_3 = BVZExt(B2BV(BVComp(tmp_2, zeros)), int(width - 1))
                nodemap[nid] = BVAdd(tmp_3, nodemap[nid])
            nodemap[nid] = BVComp(BVAnd(BV(1, width), nodemap[nid]), BV(1, width))

        if ntype == REDAND:
            width = get_type(getnode(nids[1])).width
            ones = BV((2**width) - 1, width)
            nodemap[nid] = BVComp(getnode(nids[1]), ones)

        if ntype == CONSTD:
            width = getnode(nids[0]).width
            nodemap[nid] = BV(int(nids[1]), width)

        if ntype == CONST:
            width = getnode(nids[0]).width
            nodemap[nid] = BV(bin_to_dec(nids[1]), width)

        if ntype == STATE:
            if len(nids) > 1:
                nodemap[nid] = Symbol(nids[1], getnode(nids[0]))
            else:
                nodemap[nid] = Symbol((SN % nid), getnode(nids[0]))
            ts.add_state_var(nodemap[nid])

        if ntype == INPUT:
            if len(nids) > 1:
                nodemap[nid] = Symbol(nids[1], getnode(nids[0]))
            else:
                nodemap[nid] = Symbol((SN % nid), getnode(nids[0]))
            ts.add_input_var(nodemap[nid])

        if ntype == OUTPUT:
            # unfortunately we need to create an extra symbol just to have the
            # output name; we could be smarter about this, but then this parser
            # could not stay greedy
            original_symbol = getnode(nids[0])
            output_symbol = Symbol(nids[1], original_symbol.get_type())
            nodemap[nid] = EqualsOrIff(output_symbol, original_symbol)
            invarlist.append(nodemap[nid])
            node_covered.add(nid)
            ts.add_output_var(output_symbol)

        if ntype == AND:
            nodemap[nid] = binary_op(BVAnd, And, getnode(nids[1]), getnode(nids[2]))

        if ntype == CONCAT:
            nodemap[nid] = BVConcat(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == XOR:
            nodemap[nid] = binary_op(BVXor, Xor, getnode(nids[1]), getnode(nids[2]))

        if ntype == XNOR:
            nodemap[nid] = BVNot(binary_op(BVXor, Xor, getnode(nids[1]), getnode(nids[2])))

        if ntype == NAND:
            bvop = lambda x, y: BVNot(BVAnd(x, y))
            bop = lambda x, y: Not(And(x, y))
            nodemap[nid] = binary_op(bvop, bop, getnode(nids[1]), getnode(nids[2]))

        if ntype == IMPLIES:
            nodemap[nid] = BVOr(BVNot(getnode(nids[1])), getnode(nids[2]))

        if ntype == NOT:
            nodemap[nid] = unary_op(BVNot, Not, getnode(nids[1]))

        if ntype == NEG:
            nodemap[nid] = unary_op(BVNeg, Not, getnode(nids[1]))

        if ntype == UEXT:
            nodemap[nid] = BVZExt(B2BV(getnode(nids[1])), int(nids[2]))

        if ntype == SEXT:
            nodemap[nid] = BVSExt(B2BV(getnode(nids[1])), int(nids[2]))

        if ntype == OR:
            nodemap[nid] = binary_op(BVOr, Or, getnode(nids[1]), getnode(nids[2]))

        if ntype == ADD:
            nodemap[nid] = BVAdd(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == SUB:
            nodemap[nid] = BVSub(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == UGT:
            nodemap[nid] = BVUGT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == UGTE:
            nodemap[nid] = BVUGE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == ULT:
            nodemap[nid] = BVULT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == ULTE:
            nodemap[nid] = BVULE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == SGT:
            nodemap[nid] = BVSGT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == SGTE:
            nodemap[nid] = BVSGE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == SLT:
            nodemap[nid] = BVSLT(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == SLTE:
            nodemap[nid] = BVSLE(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == EQ:
            nodemap[nid] = BVComp(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == NEQ:
            nodemap[nid] = BVNot(BVComp(getnode(nids[1]), getnode(nids[2])))

        if ntype == MUL:
            nodemap[nid] = BVMul(B2BV(getnode(nids[1])), B2BV(getnode(nids[2])))

        if ntype == SLICE:
            nodemap[nid] = BVExtract(B2BV(getnode(nids[1])), int(nids[3]), int(nids[2]))

        if ntype == SLL:
            nodemap[nid] = BVLShl(getnode(nids[1]), getnode(nids[2]))

        if ntype == SRA:
            nodemap[nid] = BVAShr(getnode(nids[1]), getnode(nids[2]))

        if ntype == SRL:
            nodemap[nid] = BVLShr(getnode(nids[1]), getnode(nids[2]))

        if ntype == ITE:
            if (get_type(getnode(nids[2])) == BOOL) or (get_type(getnode(nids[3])) == BOOL):
                nodemap[nid] = Ite(BV2B(getnode(nids[1])), B2BV(getnode(nids[2])), B2BV(getnode(nids[3])))
            else:
                nodemap[nid] = Ite(BV2B(getnode(nids[1])), getnode(nids[2]), getnode(nids[3]))

        if ntype == NEXT:
            if (get_type(getnode(nids[1])) == BOOL) or (get_type(getnode(nids[2])) == BOOL):
                lval = TS.get_prime(getnode(nids[1]))
                rval = BV2B(getnode(nids[2]))
            else:
                lval = TS.get_prime(getnode(nids[1]))
                rval = getnode(nids[2])
            nodemap[nid] = EqualsOrIff(lval, rval)
            ftrans.append((lval, [(TRUE(), rval)]))

        if ntype == INIT:
            if (get_type(getnode(nids[1])) == BOOL) or (get_type(getnode(nids[2])) == BOOL):
                nodemap[nid] = EqualsOrIff(BV2B(getnode(nids[1])), BV2B(getnode(nids[2])))
            else:
                nodemap[nid] = EqualsOrIff(getnode(nids[1]), getnode(nids[2]))
            initlist.append(getnode(nid))

        if ntype == CONSTRAINT:
            nodemap[nid] = BV2B(getnode(nids[0]))
            invarlist.append(getnode(nid))

        if ntype == BAD:
            nodemap[nid] = getnode(nids[0])

            if ASSERTINFO in line:
                filename_lineno = os.path.basename(nids[3])
                assert_name = 'embedded_assertion_%s' % filename_lineno
                description = "Embedded assertion at line {1} in {0}".format(
                    *filename_lineno.split(COLON_REP))
            else:
                assert_name = 'embedded_assertion_%i' % prop_count
                description = 'Embedded assertion number %i' % prop_count
                prop_count += 1

            # Following problem format (name, description, strformula)
            invar_props.append((assert_name, description, Not(BV2B(getnode(nid)))))

        if nid not in nodemap:
            Logger.error("Unknown node type \"%s\"" % ntype)

        # get wirename if it exists
        if ntype not in {STATE, INPUT, OUTPUT, BAD}:
            # check for wirename; if it's an integer, then it's a node ref
            try:
                a = int(nids[-1])
            except:
                try:
                    wire = Symbol(str(nids[-1]), getnode(nids[0]))
                    invarlist.append(EqualsOrIff(wire, B2BV(nodemap[nid])))
                    ts.add_var(wire)
                except:
                    pass

    if Logger.level(1):
        name = lambda x: str(nodemap[x]) if nodemap[x].is_symbol() else x
        uncovered = [name(x) for x in nodemap if x not in node_covered]
        uncovered.sort()
        if len(uncovered) > 0:
            Logger.warning("Unlinked nodes \"%s\"" % ",".join(uncovered))

    if not self.symbolic_init:
        init = simplify(And(initlist))
    else:
        init = TRUE()

    invar = simplify(And(invarlist))

    # instead of trans, we're using the ftrans format -- see below
    ts.set_behavior(init, TRUE(), invar)

    # add ftrans
    for var, cond_assign_list in ftrans:
        ts.add_func_trans(var, cond_assign_list)

    hts.add_ts(ts)

    return (hts, invar_props, ltl_props)
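# Usage sketch (illustrative, not part of the parser class): feeding a tiny
# BTOR2 description through parse_string above and unpacking its result. The
# four-line model and the helper name are hypothetical; the keyword spellings
# follow the standard BTOR2 format, which the SORT/STATE/ZERO/INIT constants
# are assumed to match, while the (hts, invar_props, ltl_props) return shape
# comes directly from the parser.
def _btor2_smoke_test(parser):
    example = "\n".join([
        "1 sort bitvec 8",    # node 1: 8-bit bit-vector sort
        "2 state 1 counter",  # node 2: an 8-bit state variable named "counter"
        "3 zero 1",           # node 3: the 8-bit constant 0
        "4 init 1 2 3",       # counter is initialized to 0
    ])
    hts, invar_props, ltl_props = parser.parse_string(example)
    # no "bad" nodes in the model, hence no embedded assertions
    assert invar_props == [] and ltl_props == []
    return hts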
def parse_model(self,
                model_files,
                relative_path,
                general_config,
                name=None,
                modifier=None):

    hts = HTS(name if name is not None else "System")
    invar_props = []
    ltl_props = []

    models = model_files.split(FILE_SP)
    cache_files = general_config.cache_files
    clean_cache = general_config.clean_cache

    for strfile in models:
        (strfile, flags) = self.get_file_flags(strfile)
        if len(strfile) > 1 and strfile[:2] == '~/':
            filepath = Path.home() / Path(strfile[2:])
        else:
            filepath = Path(strfile)
        if filepath.parts[0] != "/":
            filepath = relative_path / filepath
        filetype = filepath.suffix[1:]
        parser = None

        for av_parser in ModelParsersFactory.get_parsers():
            assert av_parser.name is not None
            if filetype in av_parser.get_extensions():
                parser = av_parser
                if not self.parser:
                    self.parser = av_parser

        if parser is not None:
            if not filepath.is_file():
                Logger.error("File \"%s\" does not exist" % filepath)

            if cache_files:
                md5 = self.md5(filepath)
                cf = "-".join(["1" if general_config.abstract_clock else "0",
                               "1" if general_config.add_clock else "0",
                               "1" if general_config.boolean else "0"])
                cachefile = "%s-%s" % (md5, cf)
                cachedir = filepath.parent / COSACACHEDIR

            if cache_files and self._is_cached(cachedir, cachefile, clean_cache):
                Logger.msg("Loading from cache file \"%s\"... " % (filepath), 0)
                (hts_a, inv_a, ltl_a, model_info) = self._from_cache(cachedir, cachefile,
                                                                     general_config, flags)
            else:
                Logger.msg("Parsing file \"%s\"... " % (filepath), 0)
                (hts_a, inv_a, ltl_a) = parser.parse_file(filepath, general_config, flags)
                model_info = parser.get_model_info()

                if modifier is not None:
                    modifier(hts_a)

                if cache_files and not clean_cache:
                    self._to_cache(cachedir, cachefile, hts_a, inv_a, ltl_a, model_info)

            self.model_info.combine(model_info)
            hts.combine(hts_a)

            invar_props += inv_a
            ltl_props += ltl_a

            Logger.log("DONE", 0)
            continue

        Logger.error("Filetype \"%s\" unsupported or parser is not available" % filetype)

    if Logger.level(1):
        print(hts.print_statistics(name, Logger.level(2)))

    return (hts, invar_props, ltl_props)
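# Illustrative check of the cache-key construction used in parse_model above:
# the key is the file's MD5 digest joined with three flag bits (abstract_clock,
# add_clock, boolean), so changing any of those options invalidates the cache
# entry. The _Cfg namedtuple and the helper name are stand-ins, not part of
# the code above.
from collections import namedtuple

_Cfg = namedtuple("_Cfg", "abstract_clock add_clock boolean")

def _cache_key(md5: str, cfg) -> str:
    flags = "-".join("1" if b else "0"
                     for b in (cfg.abstract_clock, cfg.add_clock, cfg.boolean))
    return "%s-%s" % (md5, flags)

# e.g. _cache_key("d41d8cd9", _Cfg(True, False, False)) -> "d41d8cd9-1-0-0"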
def parse_string(self, strinput):
    lines = []
    pstring = self.parser.parseString(strinput, parseAll=True)

    hts = HTS("STS")
    invar_props = []
    ltl_props = []

    modules = []
    modulesdic = {}
    name = MAIN
    mainmodule = None

    for psts in pstring.stss:
        var_str = []
        state_str = []
        input_str = []
        output_str = []
        sub_str = []
        par_str = []
        init_str = []
        trans_str = []
        invar_str = []

        if len(psts.moduledef) > 0:
            name = psts.moduledef[1]

        if len(psts.pardef) > 0:
            vardefs = psts.pardef
            for vardef in self._split_list(vardefs, T_CM):
                varname = vardef[0]
                vartype = vardef[2]
                try:
                    vartype = parse_typestr(vartype)
                    par_str.append((varname, vartype))
                except UndefinedSymbolError:
                    varpar = vardef[4:-1]
                    par_str.append((varname, vartype, varpar))

        dpsts = dict(psts)

        if P_VARDEFS in dpsts:
            if self.pyparsing_version == PYPARSING_220:
                vardefs = list(dict(psts.var)[P_VARDEFS])
            else:
                vardefs = list(dpsts[P_VARDEFS])
            for vardef in self._split_list(vardefs, T_SC):
                varname = vardef[0]
                if varname[0] == "'":
                    varname = varname[1:-1]
                vartype = vardef[2]
                try:
                    vartype = parse_typestr(vartype)
                    var_str.append((varname, vartype))
                except UndefinedSymbolError:
                    varpar = vardef[4:-1]
                    sub_str.append((varname, vartype, self._split_list(varpar, T_CM)))

        if P_STATEDEFS in dpsts:
            if self.pyparsing_version == PYPARSING_220:
                statedefs = list(dict(psts.state)[P_STATEDEFS])
            else:
                statedefs = list(dpsts[P_STATEDEFS])
            for statedef in self._split_list(statedefs, T_SC):
                statename = statedef[0]
                if statename[0] == "'":
                    statename = statename[1:-1]
                statetype = parse_typestr(statedef[2])
                state_str.append((statename, statetype))

        if P_INPUTDEFS in dpsts:
            if self.pyparsing_version == PYPARSING_220:
                inputdefs = list(dict(psts.input)[P_INPUTDEFS])
            else:
                inputdefs = list(dpsts[P_INPUTDEFS])
            for inputdef in self._split_list(inputdefs, T_SC):
                inputname = inputdef[0]
                if inputname[0] == "'":
                    inputname = inputname[1:-1]
                inputtype = parse_typestr(inputdef[2])
                input_str.append((inputname, inputtype))

        if P_OUTPUTDEFS in dpsts:
            if self.pyparsing_version == PYPARSING_220:
                outputdefs = list(dict(psts.output)[P_OUTPUTDEFS])
            else:
                outputdefs = list(dpsts[P_OUTPUTDEFS])
            for outputdef in self._split_list(outputdefs, T_SC):
                outputname = outputdef[0]
                if outputname[0] == "'":
                    outputname = outputname[1:-1]
                outputtype = parse_typestr(outputdef[2])
                output_str.append((outputname, outputtype))

        if P_INIT in dpsts:
            if self.pyparsing_version == PYPARSING_220:
                inits = list(dict(psts.init)[P_FORMULAE])
            else:
                inits = list(dpsts[P_INIT])[1:]
            for i in range(0, len(inits), 2):
                init_str.append(inits[i])

        if P_TRANS in dpsts:
            if self.pyparsing_version == PYPARSING_220:
                transs = list(dict(psts.trans)[P_FORMULAE])
            else:
                transs = list(dpsts[P_TRANS])[1:]
            for i in range(0, len(transs), 2):
                trans_str.append(transs[i])

        if P_INVAR in dpsts:
            if self.pyparsing_version == PYPARSING_220:
                invars = list(dict(psts.invar)[P_FORMULAE])
            else:
                invars = list(dpsts[P_INVAR])[1:]
            for i in range(0, len(invars), 2):
                invar_str.append(invars[i])

        module = STSModule(name, var_str, state_str, input_str, output_str,
                           par_str, init_str, invar_str, trans_str, sub_str)
        modules.append(module)

        if name == MAIN:
            mainmodule = module
        else:
            modulesdic[name] = module

    hts = self.generate_HTS(mainmodule, modulesdic)
    hts.flatten()
    return (hts, invar_props, ltl_props)
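# The section handling above repeatedly calls self._split_list(tokens, sep) to
# cut a flat pyparsing token list into per-declaration chunks. A minimal
# re-implementation with the behavior assumed here (split on every occurrence
# of the separator token, drop the separators, keep a trailing chunk); the
# helper name and the example below are illustrative only:
def _split_list_sketch(tokens, sep):
    chunks, current = [], []
    for tok in tokens:
        if tok == sep:
            chunks.append(current)
            current = []
        else:
            current.append(tok)
    if current:
        chunks.append(current)
    return chunks

# _split_list_sketch(["x", ":", "BV(4)", ";", "y", ":", "Bool"], ";")
#   -> [["x", ":", "BV(4)"], ["y", ":", "Bool"]]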
def combine_systems(hts, hts2, k, symbolic_init, eqprop=None, inc=True, non_deterministic=False):
    htseq = HTS("eq")

    hts1_varnames = [v.symbol_name() for v in hts.vars]
    hts2_varnames = [v.symbol_name() for v in hts2.vars]

    map1 = dict([(v, TS.get_prefix_name(v, S1)) for v in hts1_varnames] +
                [(TS.get_prime_name(v), TS.get_prefix_name(TS.get_prime_name(v), S1))
                 for v in hts1_varnames])
    map2 = dict([(v, TS.get_prefix_name(v, S2)) for v in hts2_varnames] +
                [(TS.get_prime_name(v), TS.get_prefix_name(TS.get_prime_name(v), S2))
                 for v in hts2_varnames])

    ts1_init = TRUE()
    ts2_init = TRUE()

    if not symbolic_init:
        ts1_init = substitute(hts.single_init(), map1)
        ts2_init = substitute(hts2.single_init(), map2)

    ts1 = TS()
    ts1.vars = set([TS.get_prefix(v, S1) for v in hts.vars])
    ts1.set_behavior(ts1_init,
                     substitute(hts.single_trans(), map1),
                     substitute(hts.single_invar(), map1))
    ts1.state_vars = set([TS.get_prefix(v, S1) for v in hts.state_vars])

    ts2 = TS()
    ts2.vars = set([TS.get_prefix(v, S2) for v in hts2.vars])
    ts2.set_behavior(ts2_init,
                     substitute(hts2.single_trans(), map2),
                     substitute(hts2.single_invar(), map2))
    ts2.state_vars = set([TS.get_prefix(v, S2) for v in hts2.state_vars])

    htseq.add_ts(ts1)
    htseq.add_ts(ts2)

    assumptions = []
    lemmas = []

    def sets_intersect(set1, set2):
        # despite the name, this returns True iff every element of set1 is in set2
        for el in set1:
            if el not in set2:
                return False
        return True

    if hts.assumptions is not None:
        for assumption in hts.assumptions:
            assumptions.append(assumption)

    if hts.lemmas is not None:
        for lemma in hts.lemmas:
            lemmas.append(lemma)

    if hts2.assumptions is not None:
        for assumption in hts2.assumptions:
            assumptions.append(assumption)

    if hts2.lemmas is not None:
        for lemma in hts2.lemmas:
            lemmas.append(lemma)

    for assumption in assumptions:
        fv_assumption = get_free_variables(assumption)
        c_assumption = TRUE()

        if sets_intersect(fv_assumption, hts.vars):
            c_assumption = And(c_assumption, substitute(assumption, map1))
        if sets_intersect(fv_assumption, hts2.vars):
            c_assumption = And(c_assumption, substitute(assumption, map2))
        if c_assumption != TRUE():
            htseq.add_assumption(c_assumption)

    for lemma in lemmas:
        fv_lemma = get_free_variables(lemma)
        c_lemma = TRUE()

        if sets_intersect(fv_lemma, hts.vars):
            c_lemma = And(c_lemma, substitute(lemma, map1))
        if sets_intersect(fv_lemma, hts2.vars):
            c_lemma = And(c_lemma, substitute(lemma, map2))
        if c_lemma != TRUE():
            htseq.add_lemma(c_lemma)

    miter_out = Symbol(EQS, BOOL)

    inputs = hts.input_vars.intersection(hts2.input_vars)
    outputs = hts.output_vars.intersection(hts2.output_vars)

    htseq.input_vars = set([TS.get_prefix(v, S1) for v in hts.input_vars]).union(
        set([TS.get_prefix(v, S2) for v in hts2.input_vars]))
    htseq.output_vars = set([TS.get_prefix(v, S1) for v in hts.output_vars]).union(
        set([TS.get_prefix(v, S2) for v in hts2.output_vars]))

    if symbolic_init or (not non_deterministic):
        states = hts.state_vars.intersection(hts2.state_vars)
    else:
        states = []

    eqinputs = TRUE()
    eqoutputs = TRUE()
    eqstates = TRUE()

    for inp in inputs:
        eqinputs = And(eqinputs,
                       EqualsOrIff(TS.get_prefix(inp, S1), TS.get_prefix(inp, S2)))

    for out in outputs:
        eqoutputs = And(eqoutputs,
                        EqualsOrIff(TS.get_prefix(out, S1), TS.get_prefix(out, S2)))

    for svar in states:
        eqstates = And(eqstates,
                       EqualsOrIff(TS.get_prefix(svar, S1), TS.get_prefix(svar, S2)))

    if eqprop is None:
        if symbolic_init or (not non_deterministic):
            invar = And(eqinputs, Iff(miter_out, Implies(eqstates, eqoutputs)))
        else:
            invar = And(eqinputs, Iff(miter_out, eqoutputs))
        Logger.log('Inferring equivalence property: {}'.format(invar), 2)
    else:
        sparser = StringParser()
        eqprop = sparser.parse_formulae(eqprop)
        if len(eqprop) > 1:
            Logger.error("Expecting a single equivalence property")
        eqprop = eqprop[0][1]
        invar = Iff(miter_out, eqprop)
        Logger.log('Using provided equivalence property: {}'.format(invar), 2)

    tsmo = TS()
    tsmo.vars = set([miter_out])
    tsmo.invar = invar
    htseq.add_ts(tsmo)

    return (htseq, miter_out)
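# Usage sketch for the miter construction above: combine two parsed systems
# and hand the equivalence check to the safety engine. The helper name,
# variable names, and the surrounding problem object are hypothetical;
# combine_systems itself returns the combined HTS plus the miter output
# symbol, and the property handed to the BMC engine is assumed to be simply
# "miter_out holds".
def _check_equivalence(hts_a, hts_b, problem, bmc_length):
    htseq, miter_out = combine_systems(hts_a, hts_b, bmc_length,
                                       symbolic_init=False,
                                       eqprop=None,
                                       inc=True,
                                       non_deterministic=False)
    bmc = BMCSafety(htseq, problem)
    res, trace, _ = bmc.safety(miter_out, bmc_length, 0)
    return res, trace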
def parse_string(self, lines):

    [none, var, state, input, output, init, invar, trans, ftrans] = range(9)
    section = none

    inits = TRUE()
    invars = TRUE()
    transs = TRUE()
    ftranss = {}

    sparser = StringParser()

    count = 0
    vars = set([])
    states = set([])
    inputs = set([])
    outputs = set([])
    invar_props = []
    ltl_props = []

    for line in lines:
        count += 1

        if (line.strip() in ["", "\n"]) or (line[0] == T_COM):
            continue

        if T_VAR == line[:len(T_VAR)]:
            section = var
            continue

        if T_STATE == line[:len(T_STATE)]:
            section = state
            continue

        if T_INPUT == line[:len(T_INPUT)]:
            section = input
            continue

        if T_OUTPUT == line[:len(T_OUTPUT)]:
            section = output
            continue

        if T_INIT == line[:len(T_INIT)]:
            section = init
            continue

        if T_INVAR == line[:len(T_INVAR)]:
            section = invar
            continue

        if T_TRANS == line[:len(T_TRANS)]:
            section = trans
            continue

        if T_FTRANS == line[:len(T_FTRANS)]:
            section = ftrans
            continue

        if section in [var, state, input, output]:
            varname, vartype = line[:-2].replace(" ", "").split(":")
            if varname[0] == "'":
                varname = varname[1:-1]
            vartype = parse_typestr(vartype)
            vardef = self._define_var(varname, vartype)
            vars.add(vardef)
            if section == state:
                states.add(vardef)
            if section == input:
                inputs.add(vardef)
            if section == output:
                outputs.add(vardef)

        if section in [init, invar, trans]:
            line = line.replace(T_SC, "").strip()
            qline = quote_names(line, replace_ops=False)
            if section == init:
                inits = And(inits, sparser.parse_formula(qline))
            if section == invar:
                invars = And(invars, sparser.parse_formula(qline))
            if section == trans:
                transs = And(transs, sparser.parse_formula(qline))

        if section == ftrans:
            strvar = line[:line.find(":=")]
            var = sparser.parse_formula(quote_names(strvar, replace_ops=False))
            cond_ass = line[line.find(":=") + 2:].strip()
            ftranss[var] = []

            for cond_as in cond_ass.split("{"):
                if cond_as == "":
                    continue
                cond = cond_as[:cond_as.find(",")]
                ass = cond_as[cond_as.find(",") + 1:cond_as.find("}")]
                ftranss[var].append(
                    (sparser.parse_formula(quote_names(cond, replace_ops=False)),
                     sparser.parse_formula(quote_names(ass, replace_ops=False))))

    hts = HTS("STS")
    ts = TS()

    ts.vars = vars
    ts.state_vars = states
    ts.input_vars = inputs
    ts.output_vars = outputs
    ts.init = inits
    ts.invar = invars
    ts.trans = transs
    ts.ftrans = ftranss

    hts.add_ts(ts)

    return (hts, invar_props, ltl_props)
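# The FTRANS section above accepts lines of the form
#     var := {cond1, value1}{cond2, value2}...
# This helper mirrors the string surgery performed there, returning the raw
# (condition, assignment) text pairs before they are handed to StringParser.
# The helper name and the example line are illustrative only.
def _split_ftrans_line(line: str):
    var_txt = line[:line.find(":=")].strip()
    pairs = []
    for chunk in line[line.find(":=") + 2:].strip().split("{"):
        if chunk == "":
            continue
        cond = chunk[:chunk.find(",")]
        assign = chunk[chunk.find(",") + 1:chunk.find("}")]
        pairs.append((cond.strip(), assign.strip()))
    return var_txt, pairs

# _split_ftrans_line("out := {rst = 1_1, 0_8}{True, in}")
#   -> ("out", [("rst = 1_1", "0_8"), ("True", "in")])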
def compute(self, hts, prop):
    Logger.log("Building COI", 1)

    self._build_var_deps(hts)

    coi_vars = set(self._free_variables(prop))

    if (len(coi_vars) < 1) or (self.var_deps == {}):
        return hts

    if hts.assumptions is not None:
        for assumption in hts.assumptions:
            for v in self._free_variables(assumption):
                coi_vars.add(v)

    if hts.lemmas is not None:
        for lemma in hts.lemmas:
            for v in self._free_variables(lemma):
                coi_vars.add(v)

    coits = TS("COI")

    # transitive closure over the variable dependency graph, starting from
    # the variables of the property, assumptions, and lemmas
    coi_vars = list(coi_vars)
    i = 0
    visited = set([])
    while i < len(coi_vars):
        var = coi_vars[i]
        if (var in visited) or (var not in self.var_deps):
            i += 1
            continue
        coi_vars = coi_vars[:i + 1] + list(self.var_deps[var]) + coi_vars[i + 1:]
        visited.add(var)
        i += 1

    coi_vars = frozenset(coi_vars)

    trans = list(conjunctive_partition(hts.single_trans(include_ftrans=True)))
    invar = list(conjunctive_partition(hts.single_invar(include_ftrans=True)))
    init = list(conjunctive_partition(hts.single_init()))

    coits.trans = [f for f in trans if self._intersect(coi_vars, self._free_variables(f))]
    coits.invar = [f for f in invar if self._intersect(coi_vars, self._free_variables(f))]
    coits.init = [f for f in init if self._intersect(coi_vars, self._free_variables(f))]

    Logger.log("COI statistics:", 1)
    Logger.log("  Vars:  %s -> %s" % (len(hts.vars), len(coi_vars)), 1)
    Logger.log("  Init:  %s -> %s" % (len(init), len(coits.init)), 1)
    Logger.log("  Invar: %s -> %s" % (len(invar), len(coits.invar)), 1)
    Logger.log("  Trans: %s -> %s" % (len(trans), len(coits.trans)), 1)

    coits.trans = And(coits.trans)
    coits.invar = And(coits.invar)
    coits.init = And(coits.init)

    coits.vars = set([])
    for bf in [init, invar, trans]:
        for f in bf:
            for v in self._free_variables(f):
                coits.vars.add(v)

    coits.input_vars = set([v for v in coi_vars if v in hts.input_vars])
    coits.output_vars = set([v for v in coi_vars if v in hts.output_vars])
    coits.state_vars = set([v for v in coi_vars if v in hts.state_vars])

    new_hts = HTS("COI")
    new_hts.add_ts(coits)

    if self.save_model:
        printer = HTSPrintersFactory.printer_by_name("STS")
        with open("/tmp/coi_model.ssts", "w") as f:
            f.write(printer.print_hts(new_hts, []))

    return new_hts
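# Usage sketch for the cone-of-influence reduction above: the reduced HTS only
# keeps the init/invar/trans conjuncts whose free variables can reach the
# property variables through var_deps, and the smaller system is then handed
# to the safety engine. The helper name, the coi object, and the problem
# object are illustrative assumptions; compute(hts, prop) returning a
# (possibly unchanged) HTS comes directly from the code above.
def _reduce_then_check(coi, hts, prop, problem, bmc_length):
    reduced_hts = coi.compute(hts, prop)
    bmc = BMCSafety(reduced_hts, problem)
    res, trace, _ = bmc.safety(prop, bmc_length, 0)
    return res, trace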