def __init__(self, file, filename, parent_scanner=None,
             scope=None, context=None, source_encoding=None,
             parse_comments=True, initial_pos=None):
    Scanner.__init__(self, get_lexicon(), file, filename, initial_pos)
    if parent_scanner:
        self.context = parent_scanner.context
        self.included_files = parent_scanner.included_files
        self.compile_time_env = parent_scanner.compile_time_env
        self.compile_time_eval = parent_scanner.compile_time_eval
        self.compile_time_expr = parent_scanner.compile_time_expr
    else:
        self.context = context
        self.included_files = scope.included_files
        self.compile_time_env = initial_compile_time_env()
        self.compile_time_eval = 1
        self.compile_time_expr = 0
    self.parse_comments = parse_comments
    self.source_encoding = source_encoding
    if filename.is_python_file():
        self.in_python_file = True
        self.keywords = cython.set(py_reserved_words)
    else:
        self.in_python_file = False
        self.keywords = cython.set(pyx_reserved_words)
    self.trace = trace_scanner
    self.indentation_stack = [0]
    self.indentation_char = None
    self.bracket_nesting_level = 0
    self.begin('INDENT')
    self.sy = ''
    self.next()
def __init__(self, lhs, rhs, entry):
    if lhs.cf_state is None:
        lhs.cf_state = set()
    self.lhs = lhs
    self.rhs = rhs
    self.entry = entry
    self.pos = lhs.pos
    self.refs = set()
    self.is_arg = False
def __init__(self):
    self.blocks = set()
    self.entries = set()
    self.loops = []
    self.exceptions = []
    self.entry_point = ControlBlock()
    self.exit_point = ExitBlock()
    self.blocks.add(self.exit_point)
    self.block = self.entry_point
def create_extension_list(patterns, exclude=[], ctx=None, aliases=None):
    seen = set()
    deps = create_dependency_tree(ctx)
    to_exclude = set()
    if not isinstance(exclude, list):
        exclude = [exclude]
    for pattern in exclude:
        to_exclude.update(glob(pattern))
    if not isinstance(patterns, list):
        patterns = [patterns]
    module_list = []
    for pattern in patterns:
        if isinstance(pattern, str):
            filepattern = pattern
            template = None
            name = '*'
            base = None
            exn_type = Extension
        elif isinstance(pattern, Extension):
            filepattern = pattern.sources[0]
            if os.path.splitext(filepattern)[1] not in ('.py', '.pyx'):
                # ignore non-cython modules
                module_list.append(pattern)
                continue
            template = pattern
            name = template.name
            base = DistutilsInfo(exn=template)
            exn_type = template.__class__
        else:
            raise TypeError(pattern)
        for file in glob(filepattern):
            if file in to_exclude:
                continue
            pkg = deps.package(file)
            if '*' in name:
                module_name = deps.fully_qualifeid_name(file)
            else:
                module_name = name
            if module_name not in seen:
                kwds = deps.distutils_info(file, aliases, base).values
                if base is not None:
                    for key, value in base.values.items():
                        if key not in kwds:
                            kwds[key] = value
                sources = [file]
                if template is not None:
                    sources += template.sources[1:]
                module_list.append(exn_type(
                    name=module_name,
                    sources=sources,
                    **kwds))
                m = module_list[-1]
                seen.add(name)
    return module_list
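# Hedged usage sketch, not part of the original source: the glob pattern and
# exclude list below are illustrative placeholders; the snippet only exercises
# create_extension_list() as defined above.
if __name__ == '__main__':
    extensions = create_extension_list(
        "src/*.pyx",                    # build every Cython module under src/
        exclude=["src/test_*.pyx"])     # skip hypothetical test modules
    for ext in extensions:
        print("%s: %s" % (ext.name, ext.sources))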
def __init__(self):
    self.children = set()
    self.parents = set()
    self.positions = set()
    self.stats = []
    self.gen = {}
    self.bounded = set()
    self.i_input = 0
    self.i_output = 0
    self.i_gen = 0
    self.i_kill = 0
    self.i_state = 0
def cimports_and_externs(self, filename):
    cimports, includes, externs = self.parse_dependencies(filename)[:3]
    cimports = set(cimports)
    externs = set(externs)
    for include in includes:
        include_path = os.path.join(os.path.dirname(filename), include)
        if not os.path.exists(include_path):
            include_path = self.context.find_include_file(include, None)
        if include_path:
            a, b = self.cimports_and_externs(include_path)
            cimports.update(a)
            externs.update(b)
        else:
            print("Unable to locate '%s' referenced from '%s'" % (include, filename))
    return tuple(cimports), tuple(externs)
def map_one(self, istate, entry):
    ret = set()
    assmts = self.assmts[entry]
    if istate & assmts.bit:
        ret.add(Uninitialized)
    for assmt in assmts.stats:
        if istate & assmt.bit:
            ret.add(assmt)
    return ret
def __init__(self, owner, names_taken=cython.set()):
    self.names_taken = names_taken
    self.owner = owner

    self.error_label = None
    self.label_counter = 0
    self.labels_used = cython.set()
    self.return_label = self.new_label()
    self.new_error_label()
    self.continue_label = None
    self.break_label = None

    self.in_try_finally = 0
    self.exc_vars = None

    self.temps_allocated = []  # of (name, type, manage_ref)
    self.temps_free = {}  # (type, manage_ref) -> list of free vars with same type/managed status
    self.temps_used_type = {}  # name -> (type, manage_ref)
    self.temp_counter = 0
def normalize(self):
    """Delete unreachable and orphan blocks."""
    queue = set([self.entry_point])
    visited = set()
    while queue:
        root = queue.pop()
        visited.add(root)
        for child in root.children:
            if child not in visited:
                queue.add(child)
    unreachable = self.blocks - visited
    for block in unreachable:
        block.detach()
    visited.remove(self.entry_point)
    for block in visited:
        if block.empty():
            for parent in block.parents:  # Re-parent
                for child in block.children:
                    parent.add_child(child)
            block.detach()
            unreachable.add(block)
    self.blocks -= unreachable
def visit_ParallelRangeNode(self, node):
    reductions = self.reductions

    # if node.target is None or not a NameNode, an error will have
    # been previously issued
    if hasattr(node.target, "entry"):
        self.reductions = cython.set(reductions)

        for private_node in node.assigned_nodes:
            private_node.entry.error_on_uninitialized = True
            pos, reduction = node.assignments[private_node.entry]
            if reduction:
                self.reductions.add(private_node.entry)

        node = self.visit_ForInStatNode(node)

    self.reductions = reductions
    return node
def __init__(self, writer, emit_linenums=False):
    self.filename_table = {}
    self.filename_list = []
    self.input_file_contents = {}
    self.utility_codes = cython.set()
    self.declared_cnames = {}
    self.in_utility_code_generation = False
    self.emit_linenums = emit_linenums
    self.parts = {}

    self.const_cname_counter = 1
    self.string_const_index = {}
    self.int_const_index = {}
    self.py_constants = []

    assert writer.globalstate is None
    writer.globalstate = self
    self.rootwriter = writer
def visit_ModuleNode(self, node):
    self.gv_ctx = GVContext()

    # Set of NameNode reductions
    self.reductions = cython.set()

    self.env_stack = []
    self.env = node.scope
    self.stack = []
    self.flow = ControlFlow()
    self.visitchildren(node)

    check_definitions(self.flow, self.current_directives)

    dot_output = self.current_directives["control_flow.dot_output"]
    if dot_output:
        annotate_defs = self.current_directives["control_flow.dot_annotate_defs"]
        fp = open(dot_output, "wt")
        try:
            self.gv_ctx.render(fp, "module", annotate_defs=annotate_defs)
        finally:
            fp.close()
    return node
def __init__(self, node, entry):
    if node.cf_state is None:
        node.cf_state = set()
    self.node = node
    self.entry = entry
    self.pos = node.pos
def check_definitions(flow, compiler_directives):
    flow.initialize()
    flow.reaching_definitions()

    # Track down state
    assignments = set()
    # Node to entry map
    references = {}
    assmt_nodes = set()

    for block in flow.blocks:
        i_state = block.i_input
        for stat in block.stats:
            i_assmts = flow.assmts[stat.entry]
            state = flow.map_one(i_state, stat.entry)
            if isinstance(stat, NameAssignment):
                stat.lhs.cf_state.update(state)
                assmt_nodes.add(stat.lhs)
                i_state = i_state & ~i_assmts.mask
                if stat.rhs:
                    i_state |= stat.bit
                else:
                    i_state |= i_assmts.bit
                assignments.add(stat)
                stat.entry.cf_assignments.append(stat)
            elif isinstance(stat, NameReference):
                references[stat.node] = stat.entry
                stat.entry.cf_references.append(stat)
                stat.node.cf_state.update(state)
                if not stat.node.allow_null:
                    i_state &= ~i_assmts.bit
                state.discard(Uninitialized)
                for assmt in state:
                    assmt.refs.add(stat)

    # Check variable usage
    warn_maybe_uninitialized = compiler_directives['warn.maybe_uninitialized']
    warn_unused_result = compiler_directives['warn.unused_result']
    warn_unused = compiler_directives['warn.unused']
    warn_unused_arg = compiler_directives['warn.unused_arg']

    messages = MessageCollection()

    # assignment hints
    for node in assmt_nodes:
        if Uninitialized in node.cf_state:
            node.cf_maybe_null = True
            if len(node.cf_state) == 1:
                node.cf_is_null = True
            else:
                node.cf_is_null = False
        else:
            node.cf_is_null = False
            node.cf_maybe_null = False

    # Find uninitialized references and cf-hints
    for node, entry in references.items():
        if Uninitialized in node.cf_state:
            node.cf_maybe_null = True
            if not entry.from_closure and len(node.cf_state) == 1:
                node.cf_is_null = True
            if node.allow_null or entry.from_closure:
                pass  # Can be uninitialized here
            elif node.cf_is_null:
                if (entry.type.is_pyobject or entry.type.is_unspecified
                        or entry.error_on_uninitialized):
                    messages.error(
                        node.pos,
                        "local variable '%s' referenced before assignment"
                        % entry.name)
                else:
                    messages.warning(
                        node.pos,
                        "local variable '%s' referenced before assignment"
                        % entry.name)
            elif warn_maybe_uninitialized:
                messages.warning(
                    node.pos,
                    "local variable '%s' might be referenced before assignment"
                    % entry.name)
        else:
            node.cf_is_null = False
            node.cf_maybe_null = False

    # Unused result
    for assmt in assignments:
        if (not assmt.refs and not assmt.entry.is_pyclass_attr
                and not assmt.entry.in_closure):
            if assmt.entry.cf_references and warn_unused_result:
                if assmt.is_arg:
                    messages.warning(
                        assmt.pos,
                        "Unused argument value '%s'" % assmt.entry.name)
                else:
                    messages.warning(
                        assmt.pos,
                        "Unused result in '%s'" % assmt.entry.name)
            assmt.lhs.cf_used = False

    # Unused entries
    for entry in flow.entries:
        if (not entry.cf_references and not entry.is_pyclass_attr
                and not entry.in_closure):
            # TODO: starred args entries are not marked with is_arg flag
            for assmt in entry.cf_assignments:
                if assmt.is_arg:
                    is_arg = True
                    break
            else:
                is_arg = False
            if is_arg:
                if warn_unused_arg:
                    messages.warning(entry.pos,
                                     "Unused argument '%s'" % entry.name)
                entry.cf_used = True  # TODO: handle unused arguments
            else:
                if warn_unused:
                    messages.warning(entry.pos,
                                     "Unused entry '%s'" % entry.name)
                entry.cf_used = False

    messages.report()
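# Hedged usage sketch, not part of the original source: check_definitions()
# expects a populated ControlFlow graph plus the compiler directives mapping.
# The keys below are exactly the ones the function reads; the boolean values
# are illustrative, and `flow` stands in for a ControlFlow built by the caller.
example_directives = {
    'warn.maybe_uninitialized': False,
    'warn.unused_result': False,
    'warn.unused': False,
    'warn.unused_arg': False,
}
# check_definitions(flow, example_directives)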
def __init__(self):
    CythonTransform.__init__(self, None)
    self.names = set()