def finalize(self):
    inferencer = StitchingInferencer(engine=self.engine,
                                     value_map=self.value_map,
                                     quote=self._quote)
    hasher = TypedtreeHasher()

    # Iterate inference to fixed point.
    old_typedtree_hash = None
    while True:
        inferencer.visit(self.typedtree)
        typedtree_hash = hasher.visit(self.typedtree)

        if old_typedtree_hash == typedtree_hash:
            break
        old_typedtree_hash = typedtree_hash

    # When we have an excess of type information, sometimes we can infer every type
    # in the AST without discovering every referenced attribute of host objects, so
    # do one last pass unconditionally.
    inferencer.visit(self.typedtree)

    # After we have found all functions, synthesize a module to hold them.
    source_buffer = source.Buffer("", "<synthesized>")
    self.typedtree = asttyped.ModuleT(
        typing_env=self.globals, globals_in_scope=set(),
        body=self.typedtree, loc=source.Range(source_buffer, 0, 0))
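# A minimal, self-contained sketch of the fixed-point pattern finalize() above
# relies on: re-run the inference pass and re-hash the tree until the hash
# stops changing, i.e. the pass no longer learns anything. The node layout,
# hash_tree() and infer_pass() are hypothetical stand-ins, not ARTIQ's
# TypedtreeHasher or StitchingInferencer.

def hash_tree(node):
    # Nodes are mutable [tag, children] pairs; any change a pass makes to a
    # tag changes the structural hash.
    tag, children = node
    return hash((tag, tuple(hash_tree(child) for child in children)))

def run_to_fixed_point(tree, infer_pass):
    old_hash = None
    while True:
        infer_pass(tree)
        new_hash = hash_tree(tree)
        if new_hash == old_hash:
            return tree
        old_hash = new_hash

# Usage: a toy pass that resolves "?" tags converges on the second iteration,
# when the hash stops changing and the loop exits.
def infer_pass(node):
    tag, children = node
    if tag == "?":
        node[0] = "int"
    for child in children:
        infer_pass(child)

run_to_fixed_point(["module", [["?", []], ["?", []]]], infer_pass)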
def _add(self, fragment):
    range_from = len(self.source)
    self.source += fragment
    range_to = len(self.source)
    return source.Range(self.source_buffer, range_from, range_to,
                        expanded_from=self.expanded_from)
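# A minimal sketch of what _add() above does, with plain tuples standing in
# for pythonparser's source.Buffer/source.Range (hypothetical names): each
# fragment appended to the synthesized source remembers the half-open
# character span it occupies, so later diagnostics can point back into the
# synthesized text.

class FragmentBuffer:
    def __init__(self):
        self.source = ""

    def add(self, fragment):
        range_from = len(self.source)
        self.source += fragment
        range_to = len(self.source)
        return (range_from, range_to)

buf = FragmentBuffer()
spans = [buf.add("self"), buf.add(".run"), buf.add("()")]
assert buf.source == "self.run()"
assert spans[1] == (4, 8)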
def finalize(self):
    inferencer = StitchingInferencer(engine=self.engine,
                                     value_map=self.value_map,
                                     quote=self._quote)
    typedtree_hasher = TypedtreeHasher()

    # Iterate inference to fixed point.
    old_typedtree_hash = None
    old_attr_count = None
    while True:
        inferencer.visit(self.typedtree)
        typedtree_hash = typedtree_hasher.visit(self.typedtree)
        attr_count = self.embedding_map.attribute_count()

        if old_typedtree_hash == typedtree_hash and old_attr_count == attr_count:
            break
        old_typedtree_hash = typedtree_hash
        old_attr_count = attr_count

    # After we've discovered every referenced attribute, check if any kernel_invariant
    # specifications refer to ones we didn't encounter.
    for host_type in self.embedding_map.type_map:
        instance_type, constructor_type = self.embedding_map.type_map[host_type]

        if not hasattr(instance_type, "constant_attributes"):
            # Exceptions lack user-definable attributes.
            continue

        for attribute in instance_type.constant_attributes:
            if attribute in instance_type.attributes:
                # Fast path; if the ARTIQ Python type has the attribute, then every observed
                # value is guaranteed to have it too.
                continue

            for value, loc in self.value_map[instance_type]:
                if hasattr(value, attribute):
                    continue

                diag = diagnostic.Diagnostic("warning",
                    "object {value} of type {typ} declares attribute '{attr}' as "
                    "kernel invariant, but the instance referenced here does not "
                    "have this attribute",
                    {"value": repr(value),
                     "typ": types.TypePrinter().name(instance_type, max_depth=0),
                     "attr": attribute},
                    loc)
                self.engine.process(diag)

    # After we have found all functions, synthesize a module to hold them.
    source_buffer = source.Buffer("", "<synthesized>")
    self.typedtree = asttyped.ModuleT(
        typing_env=self.globals, globals_in_scope=set(),
        body=self.typedtree, loc=source.Range(source_buffer, 0, 0))
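# A self-contained sketch of the kernel-invariant consistency check performed
# in finalize() above, using hypothetical names rather than ARTIQ's embedding
# map: for every attribute a type declares invariant, warn about any observed
# instance that does not actually carry it.

class Sample:
    kernel_invariants = {"period", "gain"}

    def __init__(self, period):
        self.period = period      # "gain" is declared invariant but never set

def check_invariants(instances, declared_invariants):
    warnings = []
    for attribute in sorted(declared_invariants):
        for instance in instances:
            if not hasattr(instance, attribute):
                warnings.append("object {!r} declares attribute '{}' as kernel "
                                "invariant, but does not have it"
                                .format(instance, attribute))
    return warnings

assert len(check_invariants([Sample(10)], Sample.kernel_invariants)) == 1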
def _function_loc(self, function):
    if isinstance(function, SpecializedFunction):
        function = function.host_function

    if hasattr(function, 'artiq_embedded') and function.artiq_embedded.function:
        function = function.artiq_embedded.function

    if isinstance(function, str):
        return source.Range(source.Buffer(function, "<string>"), 0, 0)

    filename = function.__code__.co_filename
    line = function.__code__.co_firstlineno
    name = function.__code__.co_name

    source_line = linecache.getline(filename, line).lstrip()
    while source_line.startswith("@") or source_line == "":
        line += 1
        source_line = linecache.getline(filename, line).lstrip()

    if "<lambda>" in function.__qualname__:
        column = 0  # can't get column of lambda
    else:
        column = re.search("def", source_line).start(0)

    source_buffer = source.Buffer(source_line, filename, line)
    return source.Range(source_buffer, column, column)
def _function_loc(self, function):
    filename = function.__code__.co_filename
    line = function.__code__.co_firstlineno
    name = function.__code__.co_name

    source_line = linecache.getline(filename, line).lstrip()
    while source_line.startswith("@") or source_line == "":
        line += 1
        source_line = linecache.getline(filename, line).lstrip()

    if "<lambda>" in function.__qualname__:
        column = 0  # can't get column of lambda
    else:
        column = re.search("def", source_line).start(0)

    source_buffer = source.Buffer(source_line, filename, line)
    return source.Range(source_buffer, column, column)
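# A runnable sketch of the technique both _function_loc() variants above use:
# read the code object's co_filename/co_firstlineno, then step past decorator
# and blank lines with linecache until the actual "def" line is found. Only
# standard-library calls are used; locate_def() is an illustrative helper, not
# part of ARTIQ.

import linecache
import re

def locate_def(function):
    filename = function.__code__.co_filename
    line = function.__code__.co_firstlineno

    # For decorated functions co_firstlineno can point at the first decorator,
    # so advance until the line containing the "def" itself.
    source_line = linecache.getline(filename, line).lstrip()
    while source_line.startswith("@") or source_line == "":
        line += 1
        source_line = linecache.getline(filename, line).lstrip()

    column = re.search("def", source_line).start(0)
    return filename, line, column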
def finalize(self):
    inferencer = StitchingInferencer(engine=self.engine,
                                     value_map=self.value_map,
                                     quote=self._quote)
    hasher = TypedtreeHasher()

    # Iterate inference to fixed point.
    old_typedtree_hash = None
    while True:
        inferencer.visit(self.typedtree)
        typedtree_hash = hasher.visit(self.typedtree)

        if old_typedtree_hash == typedtree_hash:
            break
        old_typedtree_hash = typedtree_hash

    # After we have found all functions, synthesize a module to hold them.
    source_buffer = source.Buffer("", "<synthesized>")
    self.typedtree = asttyped.ModuleT(typing_env=self.globals, globals_in_scope=set(),
                                      body=self.typedtree,
                                      loc=source.Range(source_buffer, 0, 0))
def _quote_embedded_function(self, function, flags):
    if isinstance(function, SpecializedFunction):
        host_function = function.host_function
    else:
        host_function = function

    if not hasattr(host_function, "artiq_embedded"):
        raise ValueError("{} is not an embedded function".format(repr(host_function)))

    # Extract function source.
    embedded_function = host_function.artiq_embedded.function
    source_code = inspect.getsource(embedded_function)
    filename = embedded_function.__code__.co_filename
    module_name = embedded_function.__globals__['__name__']
    first_line = embedded_function.__code__.co_firstlineno

    # Extract function annotation.
    signature = inspect.signature(embedded_function)
    loc = self._function_loc(embedded_function)

    arg_types = OrderedDict()
    optarg_types = OrderedDict()
    for param in signature.parameters.values():
        if param.kind == inspect.Parameter.VAR_POSITIONAL or \
                param.kind == inspect.Parameter.VAR_KEYWORD:
            diag = diagnostic.Diagnostic("error",
                "variadic arguments are not supported; '{argument}' is variadic",
                {"argument": param.name},
                self._function_loc(function),
                notes=self._call_site_note(loc, fn_kind='kernel'))
            self.engine.process(diag)

        arg_type = self._type_of_param(function, loc, param, fn_kind='kernel')
        if param.default is inspect.Parameter.empty:
            arg_types[param.name] = arg_type
        else:
            optarg_types[param.name] = arg_type

    if signature.return_annotation is not inspect.Signature.empty:
        ret_type = self._extract_annot(function, signature.return_annotation,
                                       "return type", loc, fn_kind='kernel')
    else:
        ret_type = types.TVar()

    # Extract function environment.
    host_environment = dict()
    host_environment.update(embedded_function.__globals__)
    cells = embedded_function.__closure__
    cell_names = embedded_function.__code__.co_freevars
    host_environment.update({var: cells[index] for index, var in
                             enumerate(cell_names)})

    # Find out how indented we are.
    initial_whitespace = re.search(r"^\s*", source_code).group(0)
    initial_indent = len(initial_whitespace.expandtabs())

    # Parse.
    source_buffer = source.Buffer(source_code, filename, first_line)
    lexer = source_lexer.Lexer(source_buffer, version=sys.version_info[0:2],
                               diagnostic_engine=self.engine)
    lexer.indent = [(initial_indent,
                     source.Range(source_buffer, 0, len(initial_whitespace)),
                     initial_whitespace)]
    parser = source_parser.Parser(lexer, version=sys.version_info[0:2],
                                  diagnostic_engine=self.engine)
    function_node = parser.file_input().body[0]

    # Mangle the name, since we put everything into a single module.
    full_function_name = "{}.{}".format(module_name, host_function.__qualname__)
    if isinstance(function, SpecializedFunction):
        instance_type = function.instance_type
        function_node.name = "_Z{}{}I{}{}Ezz".format(len(full_function_name), full_function_name,
                                                     len(instance_type.name), instance_type.name)
    else:
        function_node.name = "_Z{}{}zz".format(len(full_function_name), full_function_name)

    # Record the function in the function map so that LLVM IR generator
    # can handle quoting it.
    self.embedding_map.store_function(function, function_node.name)

    # Fill in the function type before typing it to handle recursive
    # invocations.
    self.functions[function] = types.TFunction(arg_types, optarg_types, ret_type)

    # Rewrite into typed form.
    asttyped_rewriter = StitchingASTTypedRewriter(
        engine=self.engine, prelude=self.prelude,
        globals=self.globals, host_environment=host_environment,
        quote=self._quote)
    function_node = asttyped_rewriter.visit_quoted_function(function_node, embedded_function)
    function_node.flags = flags

    # Add it into our typedtree so that it gets inferenced and codegen'd.
    self._inject(function_node)

    # Tie the typing knot.
    self.functions[function].unify(function_node.signature_type)

    return function_node
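# A small sketch of the length-prefixed, Itanium-flavoured mangling format used
# above so that every quoted function can share one synthesized module without
# name clashes. mangle() is a hypothetical helper, not part of ARTIQ; it only
# reproduces the two shapes visible in the code:
#   _Z<len><qualified name>zz                     plain function
#   _Z<len><qualified name>I<len><type name>Ezz   method specialization

def mangle(full_function_name, instance_type_name=None):
    if instance_type_name is None:
        return "_Z{}{}zz".format(len(full_function_name), full_function_name)
    return "_Z{}{}I{}{}Ezz".format(len(full_function_name), full_function_name,
                                   len(instance_type_name), instance_type_name)

assert mangle("mod.f") == "_Z5mod.fzz"
assert mangle("mod.C.run", "C") == "_Z9mod.C.runI1CEzz"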
def _quote_embedded_function(self, function, flags):
    if isinstance(function, SpecializedFunction):
        host_function = function.host_function
    else:
        host_function = function

    if not hasattr(host_function, "artiq_embedded"):
        raise ValueError("{} is not an embedded function".format(repr(host_function)))

    # Extract function source.
    embedded_function = host_function.artiq_embedded.function
    source_code = inspect.getsource(embedded_function)
    filename = embedded_function.__code__.co_filename
    module_name = embedded_function.__globals__['__name__']
    first_line = embedded_function.__code__.co_firstlineno

    # Extract function environment.
    host_environment = dict()
    host_environment.update(embedded_function.__globals__)
    cells = embedded_function.__closure__
    cell_names = embedded_function.__code__.co_freevars
    host_environment.update({var: cells[index] for index, var in
                             enumerate(cell_names)})

    # Find out how indented we are.
    initial_whitespace = re.search(r"^\s*", source_code).group(0)
    initial_indent = len(initial_whitespace.expandtabs())

    # Parse.
    source_buffer = source.Buffer(source_code, filename, first_line)
    lexer = source_lexer.Lexer(source_buffer, version=sys.version_info[0:2],
                               diagnostic_engine=self.engine)
    lexer.indent = [(initial_indent,
                     source.Range(source_buffer, 0, len(initial_whitespace)),
                     initial_whitespace)]
    parser = source_parser.Parser(lexer, version=sys.version_info[0:2],
                                  diagnostic_engine=self.engine)
    function_node = parser.file_input().body[0]

    # Mangle the name, since we put everything into a single module.
    full_function_name = "{}.{}".format(module_name, host_function.__qualname__)
    if isinstance(function, SpecializedFunction):
        instance_type = function.instance_type
        function_node.name = "_Z{}{}I{}{}Ezz".format(len(full_function_name), full_function_name,
                                                     len(instance_type.name), instance_type.name)
    else:
        function_node.name = "_Z{}{}zz".format(len(full_function_name), full_function_name)

    # Record the function in the function map so that LLVM IR generator
    # can handle quoting it.
    self.embedding_map.store_function(function, function_node.name)

    # Memoize the function type before typing it to handle recursive
    # invocations.
    self.functions[function] = types.TVar()

    # Rewrite into typed form.
    asttyped_rewriter = StitchingASTTypedRewriter(
        engine=self.engine, prelude=self.prelude,
        globals=self.globals, host_environment=host_environment,
        quote=self._quote)
    function_node = asttyped_rewriter.visit_quoted_function(function_node, embedded_function)
    function_node.flags = flags

    # Add it into our typedtree so that it gets inferenced and codegen'd.
    self._inject(function_node)

    # Tie the typing knot.
    self.functions[function].unify(function_node.signature_type)

    return function_node
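# A self-contained illustration of the host-environment capture done above:
# module-level names come from __globals__, and the closure's free variables
# are recovered by pairing co_freevars with the __closure__ cells. The real
# code stores the cell objects themselves; cell_contents is read here only to
# show what they hold.

def make_kernel():
    gain = 2.5                        # captured as a closure cell
    def kernel(x):
        return x * gain
    return kernel

kernel = make_kernel()

host_environment = dict(kernel.__globals__)           # module globals
cells = kernel.__closure__
cell_names = kernel.__code__.co_freevars
host_environment.update({name: cells[index].cell_contents
                         for index, name in enumerate(cell_names)})

assert host_environment["gain"] == 2.5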