def check_int(self, ast_node):
    """Validate a comprehension pattern node, reporting unsupported shapes.

    Flags comprehension patterns that carry more than one comprehension
    range or more than one base fact pattern; a single range is validated
    recursively. Errors are accumulated via declare_error/extend_error.
    """
    ranges = ast_node.comp_ranges
    if len(ranges) <= 1:
        # Supported shape: recurse into the (at most one) range.
        for rng in ranges:
            self.check_int(rng)
    else:
        err = self.declare_error("Unsupported LHS Pattern: Comprehension pattern with multiple comprehension range.")
        for rng in ranges:
            self.extend_error(err, rng)
    # Exactly one base fact pattern is expected per comprehension.
    base_facts = Inspector().get_base_facts(ast_node.facts)
    if len(base_facts) != 1:
        err = self.declare_error("Unsupported LHS Pattern: Comprehension pattern with multiple fact patterns.")
        for fact in ast_node.facts:
            self.extend_error(err, fact)
def check_int(self, ast_node):
    """Validate a comprehension pattern node, reporting unsupported shapes.

    A pattern with more than one comprehension range, or with more than
    one base fact pattern, is flagged as unsupported; otherwise the single
    range is validated recursively.
    """
    ranges = ast_node.comp_ranges
    multiple_ranges = len(ranges) > 1
    if multiple_ranges:
        err = self.declare_error("Unsupported LHS Pattern: Comprehension pattern with multiple comprehension range.")
        for rng in ranges:
            self.extend_error(err, rng)
    else:
        for rng in ranges:
            self.check_int(rng)
    # A comprehension must contain exactly one base fact pattern.
    inspector = Inspector()
    if len(inspector.get_base_facts(ast_node.facts)) != 1:
        err = self.declare_error("Unsupported LHS Pattern: Comprehension pattern with multiple fact patterns.")
        for fact in ast_node.facts:
            self.extend_error(err, fact)
def check_int(self, ast_node):
    """Validate a comprehension pattern, reporting unsupported shapes.

    Rejects patterns with more than one comprehension range, with more
    than one base fact pattern, or whose fact's location variable is
    bound by the comprehension range (a multi-location comprehension).
    The single supported range/fact is validated recursively.
    """
    ranges = ast_node.comp_ranges
    if len(ranges) > 1:
        err = self.declare_error("Unsupported LHS Pattern: Comprehension pattern with multiple comprehension range.")
        for rng in ranges:
            self.extend_error(err, rng)
    else:
        for rng in ranges:
            self.check_int(rng)
    inspector = Inspector()
    base_facts = inspector.get_base_facts(ast_node.facts)
    if len(base_facts) == 1:
        # Single fact pattern: recurse into it.
        self.check_int(ast_node.facts[0])
    else:
        err = self.declare_error("Unsupported LHS Pattern: Comprehension pattern with multiple fact patterns.")
        for fact in ast_node.facts:
            self.extend_error(err, fact)
    if len(ast_node.facts) == 1 and len(ranges) == 1:
        # The fact's location must not be bound inside the range, otherwise
        # the comprehension would span multiple locations.
        loc = ast_node.facts[0].loc
        range_var_names = [v.name for v in inspector.free_vars(ranges[0].term_vars)]
        if loc.name in range_var_names:
            err = self.declare_error("Unsupported LHS Pattern: Multi-location comprehension patterns.")
            self.extend_error(err, loc)
def int_check(self, ast_node):
    """Annotate fact and rule declarations with analysis flags.

    Scans every rule declaration of *ast_node* to collect predicate names
    that are (a) simplified (appear in simp heads), (b) non-local (occur at
    a location other than the rule's single head location variable),
    (c) matched under LHS comprehension patterns, and (d) produced with an
    explicit priority. The collected sets are written back onto fact
    declarations (persistent / local / monotone / uses_priority), rule
    declarations (unique_head_names / rule_priority_body_names), and RHS
    facts (monotone).

    Side effects: mutates AST nodes in place and fills
    self.rule_unique_heads, self.rule_priority_body and self.fact_decs.
    """
    inspect = Inspector()
    decs = ast_node.decs
    simplified_pred_names = {}
    non_local_pred_names = {}
    lhs_compre_pred_names = {}
    prioritized_pred_names = {}
    for rule_dec in inspect.filter_decs(decs, rule=True):
        simp_heads = rule_dec.slhs
        prop_heads = rule_dec.plhs
        rule_body = rule_dec.rhs

        # Scan for simplified predicate names.
        for fact in inspect.get_base_facts(simp_heads):
            simplified_pred_names[fact.name] = ()

        # Scan for non-local predicate names; annotates non-local rule body
        # facts as well. A concrete list (not a lazy map) is required: it is
        # measured with len() and indexed below (fails on Python 3 otherwise).
        loc_var_terms = inspect.free_vars(simp_heads + prop_heads, args=False)
        loc_vars = [t.name for t in loc_var_terms]
        if len(set(loc_vars)) > 1:
            # Heads span several locations: every body predicate is non-local.
            for fact in inspect.get_base_facts(rule_body):
                non_local_pred_names[fact.name] = ()
                fact.local = False
        else:
            # NOTE(review): assumes at least one head location variable
            # exists; loc_vars[0] raises IndexError otherwise — confirm
            # the parser guarantees this.
            loc_var = loc_vars[0]
            (bfs, lfs, lfcs, comps) = inspect.partition_rule_heads(rule_body)
            for lf in lfs:
                if isinstance(lf.loc, ast.TermVar) and lf.loc.name == loc_var:
                    continue
                # Different location variable, or a non-variable location:
                # treat as non-local.
                non_local_pred_names[lf.fact.name] = ()
                lf.fact.local = False
            for lfc in lfcs:
                if isinstance(lfc.loc, ast.TermVar) and lfc.loc.name == loc_var:
                    continue
                for f in lfc.facts:
                    non_local_pred_names[f.name] = ()
                    f.local = False
            for comp in comps:
                # Assumes that comprehension fact patterns are solo.
                loc_fact = comp.facts[0]
                if loc_fact.loc.name != loc_var:
                    # Fixed: key on the predicate name (was the location
                    # variable name, which never matches a fact_dec.name).
                    non_local_pred_names[loc_fact.fact.name] = ()
                    loc_fact.fact.local = False
                elif loc_var in [tv.name for tv in inspect.free_vars(comp.comp_ranges[0].term_vars)]:
                    # Fixed: was a NameError (`non_local_pred_name`) plus the
                    # same wrong key as above.
                    non_local_pred_names[loc_fact.fact.name] = ()
                    loc_fact.fact.local = False

        # Scan for LHS comprehension predicate names.
        (bfs, lfs, lfcs, comps) = inspect.partition_rule_heads(simp_heads + prop_heads)
        for comp in comps:
            loc_fact = comp.facts[0]
            lhs_compre_pred_names[loc_fact.fact.name] = ()

        # Scan for non-unique rule heads: group head facts by predicate name.
        rule_head_pred_names = {}
        for fact in inspect.get_base_facts(simp_heads + prop_heads):
            rule_head_pred_names.setdefault(fact.name, []).append(fact)
        self.rule_unique_heads[rule_dec.name] = []
        collision_idx = 0
        for name in rule_head_pred_names:
            facts = rule_head_pred_names[name]
            unique_head = len(facts) == 1
            for fact in facts:
                fact.unique_head = unique_head
                fact.collision_idx = collision_idx
            # One collision index per predicate-name group.
            collision_idx += 1
            if unique_head:
                self.rule_unique_heads[rule_dec.name].append(name)

        # Scan for priorities on body facts.
        self.rule_priority_body[rule_dec.name] = {}
        (bfs, lfs, lfcs, comps) = inspect.partition_rule_heads(rule_body)
        for bf in bfs:
            if bf.priority is not None:
                prioritized_pred_names[bf.name] = ()
                self.rule_priority_body[rule_dec.name][bf.name] = ()
        for lf in lfs:
            if lf.priority is not None:
                prioritized_pred_names[lf.fact.name] = ()
                self.rule_priority_body[rule_dec.name][lf.fact.name] = ()
        for lfc in lfcs:
            if lfc.priority is not None:
                for f in lfc.facts:
                    prioritized_pred_names[f.name] = ()
                    self.rule_priority_body[rule_dec.name][f.name] = ()
        for comp in comps:
            if comp.priority is not None:
                for f in comp.facts:
                    prioritized_pred_names[f.name] = ()
                    self.rule_priority_body[rule_dec.name][f.name] = ()

    # Annotate fact declaration nodes with relevant information.
    fact_decs = inspect.filter_decs(decs, fact=True)
    for fact_dec in fact_decs:
        fact_dec.persistent = fact_dec.name not in simplified_pred_names
        fact_dec.local = fact_dec.name not in non_local_pred_names
        fact_dec.monotone = fact_dec.name not in lhs_compre_pred_names
        fact_dec.uses_priority = fact_dec.name in prioritized_pred_names
    self.fact_decs = fact_decs

    # Annotate rule declaration nodes with relevant information.
    rule_decs = inspect.filter_decs(decs, rule=True)
    for rule_dec in rule_decs:
        rule_dec.unique_head_names = self.rule_unique_heads[rule_dec.name]
        # list(...) keeps this a concrete list on Python 3 (dict views).
        rule_dec.rule_priority_body_names = list(self.rule_priority_body[rule_dec.name].keys())

    # Annotate RHS constraints with monotonicity information.
    for rule_dec in rule_decs:
        for fact in inspect.get_base_facts(rule_dec.rhs):
            fact.monotone = fact.name not in lhs_compre_pred_names
def int_check(self, ast_node):
    """Annotate fact and rule declarations with analysis flags.

    Scans every rule declaration of *ast_node* to collect predicate names
    that are (a) simplified (appear in simp heads), (b) non-local (occur at
    a location other than the rule's single head location variable),
    (c) matched under LHS comprehension patterns, and (d) produced with an
    explicit priority. The collected sets are written back onto fact
    declarations (persistent / local / monotone / uses_priority), rule
    declarations (unique_head_names / rule_priority_body_names), and RHS
    facts (monotone).

    Side effects: mutates AST nodes in place and fills
    self.rule_unique_heads, self.rule_priority_body and self.fact_decs.
    """
    inspect = Inspector()
    decs = ast_node.decs
    simplified_pred_names = {}
    non_local_pred_names = {}
    lhs_compre_pred_names = {}
    prioritized_pred_names = {}
    for rule_dec in inspect.filter_decs(decs, rule=True):
        simp_heads = rule_dec.slhs
        prop_heads = rule_dec.plhs
        rule_body = rule_dec.rhs

        # Scan for simplified predicate names.
        for fact in inspect.get_base_facts(simp_heads):
            simplified_pred_names[fact.name] = ()

        # Scan for non-local predicate names; annotates non-local rule body
        # facts as well. A concrete list (not a lazy map) is required: it is
        # measured with len() and indexed below (fails on Python 3 otherwise).
        loc_var_terms = inspect.free_vars(simp_heads + prop_heads, args=False)
        loc_vars = [t.name for t in loc_var_terms]
        if len(set(loc_vars)) > 1:
            # Heads span several locations: every body predicate is non-local.
            for fact in inspect.get_base_facts(rule_body):
                non_local_pred_names[fact.name] = ()
                fact.local = False
        else:
            # NOTE(review): assumes at least one head location variable
            # exists; loc_vars[0] raises IndexError otherwise — confirm
            # the parser guarantees this.
            loc_var = loc_vars[0]
            (bfs, lfs, lfcs, comps) = inspect.partition_rule_heads(rule_body)
            for lf in lfs:
                if isinstance(lf.loc, ast.TermVar) and lf.loc.name == loc_var:
                    continue
                # Different location variable, or a non-variable location:
                # treat as non-local.
                non_local_pred_names[lf.fact.name] = ()
                lf.fact.local = False
            for lfc in lfcs:
                if isinstance(lfc.loc, ast.TermVar) and lfc.loc.name == loc_var:
                    continue
                for f in lfc.facts:
                    non_local_pred_names[f.name] = ()
                    f.local = False
            for comp in comps:
                # Assumes that comprehension fact patterns are solo.
                loc_fact = comp.facts[0]
                if loc_fact.loc.name != loc_var:
                    # Fixed: key on the predicate name (was the location
                    # variable name, which never matches a fact_dec.name).
                    non_local_pred_names[loc_fact.fact.name] = ()
                    loc_fact.fact.local = False
                elif loc_var in [tv.name for tv in inspect.free_vars(comp.comp_ranges[0].term_vars)]:
                    # Fixed: was a NameError (`non_local_pred_name`) plus the
                    # same wrong key as above.
                    non_local_pred_names[loc_fact.fact.name] = ()
                    loc_fact.fact.local = False

        # Scan for LHS comprehension predicate names.
        (bfs, lfs, lfcs, comps) = inspect.partition_rule_heads(simp_heads + prop_heads)
        for comp in comps:
            loc_fact = comp.facts[0]
            lhs_compre_pred_names[loc_fact.fact.name] = ()

        # Scan for non-unique rule heads: group head facts by predicate name.
        rule_head_pred_names = {}
        for fact in inspect.get_base_facts(simp_heads + prop_heads):
            rule_head_pred_names.setdefault(fact.name, []).append(fact)
        self.rule_unique_heads[rule_dec.name] = []
        collision_idx = 0
        for name in rule_head_pred_names:
            facts = rule_head_pred_names[name]
            unique_head = len(facts) == 1
            for fact in facts:
                fact.unique_head = unique_head
                fact.collision_idx = collision_idx
            # One collision index per predicate-name group.
            collision_idx += 1
            if unique_head:
                self.rule_unique_heads[rule_dec.name].append(name)

        # Scan for priorities on body facts.
        self.rule_priority_body[rule_dec.name] = {}
        (bfs, lfs, lfcs, comps) = inspect.partition_rule_heads(rule_body)
        for bf in bfs:
            if bf.priority is not None:
                prioritized_pred_names[bf.name] = ()
                self.rule_priority_body[rule_dec.name][bf.name] = ()
        for lf in lfs:
            if lf.priority is not None:
                prioritized_pred_names[lf.fact.name] = ()
                self.rule_priority_body[rule_dec.name][lf.fact.name] = ()
        for lfc in lfcs:
            if lfc.priority is not None:
                for f in lfc.facts:
                    prioritized_pred_names[f.name] = ()
                    self.rule_priority_body[rule_dec.name][f.name] = ()
        for comp in comps:
            if comp.priority is not None:
                for f in comp.facts:
                    prioritized_pred_names[f.name] = ()
                    self.rule_priority_body[rule_dec.name][f.name] = ()

    # Annotate fact declaration nodes with relevant information.
    fact_decs = inspect.filter_decs(decs, fact=True)
    for fact_dec in fact_decs:
        fact_dec.persistent = fact_dec.name not in simplified_pred_names
        fact_dec.local = fact_dec.name not in non_local_pred_names
        fact_dec.monotone = fact_dec.name not in lhs_compre_pred_names
        fact_dec.uses_priority = fact_dec.name in prioritized_pred_names
    self.fact_decs = fact_decs

    # Annotate rule declaration nodes with relevant information.
    rule_decs = inspect.filter_decs(decs, rule=True)
    for rule_dec in rule_decs:
        rule_dec.unique_head_names = self.rule_unique_heads[rule_dec.name]
        # list(...) keeps this a concrete list on Python 3 (dict views).
        rule_dec.rule_priority_body_names = list(self.rule_priority_body[rule_dec.name].keys())

    # Annotate RHS constraints with monotonicity information.
    for rule_dec in rule_decs:
        for fact in inspect.get_base_facts(rule_dec.rhs):
            fact.monotone = fact.name not in lhs_compre_pred_names