def visit_Name(self, cost):
    # See if this is a constant-size relation.
    if cost.name in const_rels:
        return Unit()
    # Retrieve symbol.
    sym = symtab.get_symbols().get(cost.name, None)
    if sym is None:
        return cost
    # Retrieve element type.
    t = sym.type
    if t is None:
        return cost
    t = t.join(T.Set(T.Bottom))
    if not t.issmaller(T.Set(T.Top)):
        return cost
    elt_t = t.elt
    # Convert to cost.
    new_cost = type_to_cost(elt_t)
    new_cost = normalize(new_cost)
    if not isinstance(new_cost, Unknown):
        cost = new_cost
    return cost
class RelationSymbol(TypedSymbolMixin, Symbol):
    
    counted = SymbolAttribute(
        doc='Allow duplicates, associate a count with each element',
        default=False, parser=parse_bool)
    
    lru = SymbolAttribute(
        doc='Support LRU operations',
        default=False, parser=parse_bool)
    
    min_type = T.Set(T.Bottom)
    max_type = T.Set(T.Top)
    
    def __str__(self):
        s = 'Relation {}'.format(self.name)
        opts = []
        if self.type is not None:
            opts.append('type: {}'.format(self.type))
        if len(opts) > 0:
            s += ' (' + ', '.join(opts) + ')'
        return s
    
    @property
    def decl_constructor(self):
        if self.lru:
            return 'LRUSet'
        elif self.counted:
            return 'CSet'
        else:
            return 'Set'
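# Illustrative note, not part of the original listing: decl_constructor picks
# the backing structure by precedence -- an LRU relation is declared as
# 'LRUSet' even if it is also counted, a counted non-LRU relation becomes
# 'CSet', and plain relations fall back to 'Set'.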
def make_auxmap_type(auxmapinv, reltype):
    """Given a mask and a relation type, determine the corresponding
    auxiliary map type.
    
    We obtain by lattice join the smallest relation type that is at
    least as big as the given relation type and that has the correct
    arity. This should have the form {(T1, ..., Tn)}. The map type is
    then from a tuple of some Ts to a set of tuples of the remaining
    Ts.
    
    If no such type exists, e.g. if the given relation type is {Top}
    or a set of tuples of incorrect arity, we instead give the map
    type {Top: Top}.
    """
    mask = auxmapinv.mask
    arity = len(mask.m)
    bottom_reltype = T.Set(T.Tuple([T.Bottom] * arity))
    top_reltype = T.Set(T.Tuple([T.Top] * arity))
    
    norm_type = reltype.join(bottom_reltype)
    well_typed = norm_type.issmaller(top_reltype)
    if well_typed:
        assert (isinstance(norm_type, T.Set) and
                isinstance(norm_type.elt, T.Tuple) and
                len(norm_type.elt.elts) == arity)
        t_bs, t_us = L.split_by_mask(mask, norm_type.elt.elts)
        t_key = t_bs[0] if auxmapinv.unwrap_key else T.Tuple(t_bs)
        t_value = t_us[0] if auxmapinv.unwrap_value else T.Tuple(t_us)
        map_type = T.Map(t_key, T.Set(t_value))
    else:
        map_type = T.Map(T.Top, T.Top)
    
    return map_type
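# Worked example (illustrative, not from the original source). Assuming an
# auxmap invariant whose mask is 'bub' and whose unwrap_key / unwrap_value
# flags are both False (the invariant class itself is not shown here), a
# relation typed {(Top, Top, Top)}:
#
#     reltype = T.Set(T.Tuple([T.Top, T.Top, T.Top]))
#     make_auxmap_type(auxmapinv, reltype)
#
# splits the element types into bound parts (Top, Top) and the unbound part
# (Top,), giving T.Map(T.Tuple([T.Top, T.Top]), T.Set(T.Tuple([T.Top]))),
# i.e. {(Top, Top): {(Top,)}}. An operand such as T.Set(T.Top), which has no
# tuple of the right arity, falls through to the {Top: Top} fallback.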
def rewrite_comp(self, symbol, name, comp):
    # No effect if it's already a tuple.
    if isinstance(comp.resexp, L.Tuple):
        return
    
    affected_queries.add(name)
    
    comp = comp._replace(resexp=L.Tuple([comp.resexp]))
    
    t = symbol.type
    t = t.join(T.Set(T.Bottom))
    assert t.issmaller(T.Set(T.Top))
    symbol.type = T.Set(T.Tuple([t.elt]))
    
    return comp
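# Effect sketch (illustrative, not from the original source): a comprehension
# whose result expression is a single variable x is rewritten so that resexp
# becomes L.Tuple([x]), and the query symbol's type is lifted to match:
# T.Set(elt) becomes T.Set(T.Tuple([elt])). Comprehensions whose result is
# already a tuple are left untouched and their queries are not marked as
# affected.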
class MapSymbol(TypedSymbolMixin, Symbol):
    
    min_type = T.Map(T.Bottom, T.Bottom)
    max_type = T.Map(T.Top, T.Top)
    
    def __str__(self):
        s = 'Map {}'.format(self.name)
        if self.type is not None:
            s += ' (type: {})'.format(self.type)
        return s
    
    decl_constructor = 'Map'
def unwrap_singletons(tree, symtab):
    """Rewrite relations that use singleton tuple types so that the
    tuple contents are unpacked. Return the modified tree and a set
    of names of unwrapped relations. Modify types of symbols in the
    symbol table.
    
    This will change the types of relations, and add packing and
    unpacking operations at their uses, which should be eliminated by
    a follow-up optimization.
    """
    relations = symtab.get_relations()
    sing_rels = set()
    for relsym in relations.values():
        t = relsym.type
        if (isinstance(t, T.Set) and
            isinstance(t.elt, T.Tuple) and
            len(t.elt.elts) == 1):
            sing_rels.add(relsym)
    
    sing_rel_names = {rel.name for rel in sing_rels}
    tree = SingletonUnwrapper.run(tree, symtab.fresh_names.vars,
                                  sing_rel_names)
    
    for rel in sing_rels:
        rel.type = T.Set(rel.type.elt.elts[0])
    
    return tree, sing_rel_names
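# Effect sketch (illustrative, not from the original source): a relation
# symbol typed T.Set(T.Tuple([T.Top])) is selected for unwrapping; after the
# pass its type is T.Set(T.Top), and SingletonUnwrapper (defined elsewhere,
# not shown in this listing) has rewritten the relation's uses in the tree
# accordingly.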
def make_demand_query(symtab, query, left_clauses):
    """Create a demand query, update the query's demand_query
    attribute, and return the new demand query symbol.
    """
    ct = symtab.clausetools
    
    demquery_name = N.get_query_demand_query_name(query.name)
    demquery_tuple = L.tuplify(query.demand_params)
    demquery_tuple_type = symtab.analyze_expr_type(demquery_tuple)
    demquery_type = T.Set(demquery_tuple_type)
    
    demquery_comp = L.Comp(demquery_tuple, left_clauses)
    prefix = next(symtab.fresh_names.vars)
    demquery_comp = ct.comp_rename_lhs_vars(demquery_comp,
                                            lambda x: prefix + x)
    
    demquery_sym = symtab.define_query(
        demquery_name, type=demquery_type,
        node=demquery_comp, impl=query.impl)
    query.demand_query = demquery_name
    
    return demquery_sym
def make_setfrommap_type(mask, maptype):
    """Given a mask and a map type, determine the corresponding
    relation type.
    
    We obtain by lattice join the smallest map type that is at least
    as big as the given map type and that has the correct key tuple
    arity. This should have the form {(K1, ..., Kn): V}. The relation
    type is then a set of tuples of these types interleaved according
    to the mask.
    
    If no such type exists, e.g. if the given map type is {Top: Top}
    or the key is not a tuple of correct arity, we instead give the
    relation type {Top}.
    """
    nb = mask.m.count('b')
    assert mask.m.count('u') == 1
    
    bottom_maptype = T.Map(T.Tuple([T.Bottom] * nb), T.Bottom)
    top_maptype = T.Map(T.Tuple([T.Top] * nb), T.Top)
    
    norm_type = maptype.join(bottom_maptype)
    well_typed = norm_type.issmaller(top_maptype)
    if well_typed:
        assert (isinstance(norm_type, T.Map) and
                isinstance(norm_type.key, T.Tuple) and
                len(norm_type.key.elts) == nb)
        t_elts = L.combine_by_mask(mask, norm_type.key.elts,
                                   [norm_type.value])
        rel_type = T.Set(T.Tuple(t_elts))
    else:
        rel_type = T.Set(T.Top)
    
    return rel_type
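# Worked example (illustrative, not from the original source). With mask
# 'bub' and a map typed {(Top, Top): Top}:
#
#     maptype = T.Map(T.Tuple([T.Top, T.Top]), T.Top)
#     make_setfrommap_type(L.mask('bub'), maptype)
#
# interleaves the two key components and the value according to the mask,
# yielding T.Set(T.Tuple([T.Top, T.Top, T.Top])), i.e. {(Top, Top, Top)}.
# A map typed {Top: Top} is not smaller than the required key-tuple form,
# so it falls back to T.Set(T.Top).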
def get_rel_type(symtab, rel):
    """Helper for returning a relation's type."""
    # This helper is used below, but it should probably be refactored
    # into a general helper in the type subpackage.
    relsym = symtab.get_symbols().get(rel, None)
    if relsym is None:
        raise L.TransformationError(
            'No symbol info for operand relation {}'.format(rel))
    t_rel = relsym.type
    t_rel = t_rel.join(T.Set(T.Bottom))
    if not t_rel.issmaller(T.Set(T.Top)):
        raise L.ProgramError('Bad type for relation {}: {}'.format(
                             rel, t_rel))
    # Reject relations whose element type is still unknown (Bottom);
    # aggregates need a known tuple element type.
    if t_rel.elt is T.Bottom:
        raise L.ProgramError(
            'Relation must have known tuple element type '
            'before it can be used in aggregate: {}'.format(rel))
    return t_rel
def parse_typedef(s, symtab=None):
    typedefs = {}
    lines = [line for line in s.split(';')
             if line and not line.isspace()]
    for line in lines:
        name, definition = line.split('=')
        name = name.strip()
        t = T.eval_typestr(definition, typedefs)
        typedefs[name] = t
    return typedefs
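# Usage sketch (illustrative, not from the original source). The input is a
# semicolon-separated sequence of 'name = typeexpr' definitions; each parsed
# type is added to the typedefs dict that is passed back into T.eval_typestr,
# so a later definition may refer to an earlier name. The concrete
# type-expression syntax accepted by T.eval_typestr is not shown in this
# listing; with some source string one would call
#
#     typedefs = parse_typedef('elem = ...; pair = ...')
#
# and get back a dict mapping 'elem' and 'pair' to type objects.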
def incrementalize_aggr(tree, symtab, query, result_var):
    # Form the invariant.
    aggrinv = aggrinv_from_query(symtab, query, result_var)
    handler = aggrinv.get_handler()
    
    # Transform to maintain it.
    trans = AggrMaintainer(symtab.fresh_names.vars, aggrinv)
    tree = trans.process(tree)
    symtab.maint_funcs.update(trans.maint_funcs)
    
    # Transform occurrences of the aggregate.
    zero = None if aggrinv.uses_demand else handler.make_zero_expr()
    state_expr = L.DictLookup(L.Name(aggrinv.map),
                              L.tuplify(aggrinv.params), zero)
    lookup_expr = handler.make_projection_expr(state_expr)
    
    class AggrExpander(S.QueryRewriter):
        expand = True
        def rewrite_aggr(self, symbol, name, expr):
            if name == query.name:
                return lookup_expr
    
    tree = AggrExpander.run(tree, symtab)
    
    # Determine the result map's type and define its symbol.
    t_rel = get_rel_type(symtab, aggrinv.rel)
    btypes, _ = L.split_by_mask(aggrinv.mask, t_rel.elt.elts)
    t_key = T.Tuple(btypes)
    t_val = handler.result_type(t_rel)
    t_map = T.Map(t_key, t_val)
    symtab.define_map(aggrinv.map, type=t_map)
    
    symtab.stats['aggrs_transformed'] += 1
    
    return tree
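# Typing sketch (illustrative, not from the original source): for an
# aggregate over an operand relation typed {(Top, Top, Top)} with mask 'bbu',
# btypes is the bound prefix (Top, Top), so the result map is typed
# T.Map(T.Tuple([T.Top, T.Top]), t_val), where t_val is whatever the
# aggregate handler's result_type reports for that relation type.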
def run_type_inference(self, tree):
    """Run type inference over the program, using each symbol's
    type attribute as a starting point. Return a list of nodes
    where well-typedness is violated, and a set of symbols whose
    max type is exceeded.
    """
    store, fixed_vars = self.get_type_store()
    
    store, illtyped = T.analyze_types(tree, store, fixed_vars)
    
    # Write back non-query symbol types.
    badsyms = set()
    for name, type in store.items():
        sym = self.symbols[name]
        sym.type = type
        if not type.issmaller(sym.max_type):
            badsyms.add(sym)
    
    # Write back query symbol types.
    for sym in self.get_queries().values():
        type = T.analyze_expr_type(sym.node, store)
        sym.type = type
    
    return illtyped, badsyms
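# Usage sketch (illustrative, not from the original source), assuming this
# method lives on the symbol table object as the surrounding accessors
# suggest:
#
#     illtyped, badsyms = symtab.run_type_inference(tree)
#     # illtyped: nodes that could not be typed consistently
#     # badsyms:  symbols whose inferred type exceeds their max_type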
def make_demand_set(symtab, query):
    """Create a demand set, update the query's demand_set attribute,
    and return the new demand set symbol.
    """
    uset_name = N.get_query_demand_set_name(query.name)
    uset_tuple = L.tuplify(query.demand_params)
    uset_tuple_type = symtab.analyze_expr_type(uset_tuple)
    uset_type = T.Set(uset_tuple_type)
    
    maxsize = query.demand_set_maxsize
    uset_lru = maxsize is not None and maxsize > 1
    
    uset_sym = symtab.define_relation(uset_name, type=uset_type,
                                      lru=uset_lru)
    query.demand_set = uset_name
    
    return uset_sym
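# Illustrative note, not part of the original listing: the demand set is
# declared as an LRU relation only when the query's demand_set_maxsize is
# set and greater than 1; a maxsize of None (unbounded) or 1 yields an
# ordinary relation.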
def visit_IndefImgset(self, cost):
    # Constant-size relations are constant-time to query.
    if cost.rel in const_rels:
        return Unit()
    # Field lookups are constant time.
    if N.is_F(cost.rel) and cost.mask == L.mask('bu'):
        return Unit()
    
    sym = symtab.get_symbols().get(cost.rel, None)
    if sym is None:
        return cost
    
    # Get types for unbound components.
    t = sym.type
    if t is None:
        return cost
    if not (isinstance(t, T.Set) and
            isinstance(t.elt, T.Tuple) and
            len(t.elt.elts) == len(cost.mask.m)):
        return cost
    
    mask = cost.mask
    elts = t.elt.elts
    # Process out aggregate SetFromMap result components,
    # which are functionally determined by the map keys.
    if N.is_SA(cost.rel) and mask.m[-1] == 'u':
        mask = mask._replace(m=mask.m[:-1])
        elts = elts[:-1]
    
    _b_elts, u_elts = L.split_by_mask(mask, elts)
    new_cost = type_to_cost(T.Tuple(u_elts))
    new_cost = normalize(new_cost)
    if not isinstance(new_cost, Unknown):
        cost = new_cost
    return cost
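# Illustrative note, not part of the original listing: when the relation is a
# SetFromMap result (N.is_SA) and its last, functionally determined component
# is unbound, that component is dropped before costing, so the estimate is
# driven only by the remaining unbound components.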
def eval_typestr(s, symtab):
    return T.eval_typestr(s, symtab.config.typedefs)
def make_wrap_type(wrapinv, opertype):
    """Given an operand type, determine the corresponding wrap or
    unwrap type.
    """
    if wrapinv.unwrap:
        # Unwrap: {(T,)} becomes {T}.
        top_opertype = T.Set(T.Tuple([T.Top]))
        bottom_opertype = T.Set(T.Tuple([T.Bottom]))
        norm_type = opertype.join(bottom_opertype)
        well_typed = norm_type.issmaller(top_opertype)
        if well_typed:
            assert (isinstance(norm_type, T.Set) and
                    isinstance(norm_type.elt, T.Tuple) and
                    len(norm_type.elt.elts) == 1)
            return T.Set(norm_type.elt.elts[0])
        else:
            return T.Set(T.Top)
    else:
        # Wrap: {T} becomes {(T,)}.
        top_opertype = T.Set(T.Top)
        bottom_opertype = T.Set(T.Bottom)
        norm_type = opertype.join(bottom_opertype)
        well_typed = norm_type.issmaller(top_opertype)
        if well_typed:
            assert isinstance(norm_type, T.Set)
            return T.Set(T.Tuple([norm_type.elt]))
        else:
            return T.Set(T.Top)
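# Worked example (illustrative, not from the original source). For invariant
# objects whose .unwrap attribute is True / False respectively (their
# constructor is not shown in this listing):
#
#     make_wrap_type(unwrap_inv, T.Set(T.Tuple([T.Top])))  # -> T.Set(T.Top)
#     make_wrap_type(wrap_inv, T.Set(T.Top))       # -> T.Set(T.Tuple([T.Top]))
#
# Operands that do not join below the expected form fall back to T.Set(T.Top).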
def analyze_expr_type(self, expr):
    store, _fixed_vars = self.get_type_store()
    return T.analyze_expr_type(expr, store)
def result_type(self, t_oper):
    t_oper = t_oper.join(T.Set(T.Bottom))
    assert t_oper.issmaller(T.Set(T.Top))
    return t_oper.elt