Example #1
 def save_annotation(self, source_filename, target_filename):
     with Utils.open_source_file(source_filename) as f:
         code = f.read()
     generated_code = self.code.get(source_filename, {})
     c_file = Utils.decode_filename(os.path.basename(target_filename))
     html_filename = os.path.splitext(target_filename)[0] + ".html"
     with codecs.open(html_filename, "w", encoding="UTF-8") as out_buffer:
         out_buffer.write(self._save_annotation(code, generated_code, c_file))
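The annotation HTML is written next to the generated C file; a minimal sketch of the path handling (illustrative paths only, not part of the snippet above):

import os

target_filename = "build/module.c"                        # hypothetical target
html_filename = os.path.splitext(target_filename)[0] + ".html"
print(html_filename)                                       # build/module.html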
Example #2
File: Main.py Project: jpe/cython
 def teardown_errors(self, err, options, result):
     source_desc = result.compilation_source.source_desc
     if not isinstance(source_desc, FileSourceDescriptor):
         raise RuntimeError("Only file sources for code supported")
     Errors.close_listing_file()
     result.num_errors = Errors.num_errors
     if result.num_errors > 0:
         err = True
     if err and result.c_file:
         try:
             Utils.castrate_file(result.c_file, os.stat(source_desc.filename))
         except EnvironmentError:
             pass
         result.c_file = None
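A sketch of what invalidating the stale C file could look like, assuming Utils.castrate_file's job is to make the failed output unusable and reset its timestamps (the real helper may differ):

import os

def invalidate_generated_file(path, source_stat):
    # Sketch only: overwrite the stale output so it cannot be compiled by accident ...
    with open(path, "w") as f:
        f.write("#error Do not use this file, "
                "it is the result of a failed Cython compilation.\n")
    # ... and restore the source's timestamps so build tools re-run Cython.
    os.utime(path, (source_stat.st_atime, source_stat.st_mtime))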
Example #3
 def teardown_errors(self, err, options, result):
     source_desc = result.compilation_source.source_desc
     if not isinstance(source_desc, FileSourceDescriptor):
         raise RuntimeError("Only file sources for code supported")
     Errors.close_listing_file()
     result.num_errors = Errors.num_errors
     if result.num_errors > 0:
         err = True
     if err and result.c_file:
         try:
             Utils.castrate_file(result.c_file, os.stat(source_desc.filename))
         except EnvironmentError:
             pass
         result.c_file = None
Example #4
    def search_include_directories(self,
                                   qualified_name,
                                   suffix,
                                   pos,
                                   include=False,
                                   sys_path=False):
        # Search the list of include directories for the given
        # file name. If a source file position is given, first
        # searches the directory containing that file. Returns
        # None if not found, but does not report an error.
        # The 'include' option will disable package dereferencing.
        # If 'sys_path' is True, also search sys.path.
        dirs = self.include_directories
        if sys_path:
            dirs = dirs + sys.path
        if pos:
            file_desc = pos[0]
            if not isinstance(file_desc, FileSourceDescriptor):
                raise RuntimeError("Only file sources for code supported")
            if include:
                dirs = [os.path.dirname(file_desc.filename)] + dirs
            else:
                dirs = [self.find_root_package_dir(file_desc.filename)] + dirs

        dotted_filename = qualified_name
        if suffix:
            dotted_filename += suffix
        if not include:
            names = qualified_name.split('.')
            package_names = names[:-1]
            module_name = names[-1]
            module_filename = module_name + suffix
            package_filename = "__init__" + suffix

        for dir in dirs:
            path = os.path.join(dir, dotted_filename)
            if Utils.path_exists(path):
                return path
            if not include:
                package_dir = self.check_package_dir(dir, package_names)
                if package_dir is not None:
                    path = os.path.join(package_dir, module_filename)
                    if Utils.path_exists(path):
                        return path
                    path = os.path.join(dir, package_dir, module_name,
                                        package_filename)
                    if Utils.path_exists(path):
                        return path
        return None
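For a qualified name like 'pkg.mod' with suffix '.pxd', each directory is probed roughly in this order (illustrative sketch, ignoring the check_package_dir details):

import os

dir = "/usr/local/include/cython"                        # hypothetical include dir
print(os.path.join(dir, "pkg.mod.pxd"))                  # 1) dotted filename as-is
print(os.path.join(dir, "pkg", "mod.pxd"))               # 2) module file in the package
print(os.path.join(dir, "pkg", "mod", "__init__.pxd"))   # 3) package __init__ file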
Example #5
 def __init__(self, filename, path_description=None):
     filename = Utils.decode_filename(filename)
     self.path_description = path_description or filename
     self.filename = filename
     self.set_file_type_from_name(filename)
     self._cmp_name = filename
     self._lines = {}
Example #6
 def get_lines(self, encoding=None, error_handling=None):
     # we cache the lines only the second time this is called, in
     # order to save memory when they are only used once
     key = (encoding, error_handling)
     try:
         lines = self._lines[key]
         if lines is not None:
             return lines
     except KeyError:
         pass
     f = Utils.open_source_file(
         self.filename, encoding=encoding,
         error_handling=error_handling,
         # newline normalisation is costly before Py2.6
         require_normalised_newlines=False)
     try:
         lines = list(f)
     finally:
         f.close()
     if key in self._lines:
         self._lines[key] = lines
     else:
         # do not cache the first access, but remember that we
         # already read it once
         self._lines[key] = None
     return lines
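The same cache-on-second-read idea, extracted as a standalone sketch (hypothetical helper, not Cython API): the first read is not kept, only remembered; from the second read onwards the data is cached.

class SecondReadCache:
    def __init__(self, load):
        self._load = load    # callable: key -> freshly loaded data
        self._seen = {}      # key -> cached data, or None if read only once

    def get(self, key):
        if self._seen.get(key) is not None:
            return self._seen[key]
        data = self._load(key)
        # cache only if this key was already requested before
        self._seen[key] = data if key in self._seen else None
        return data

cache = SecondReadCache(lambda key: "lines for %s" % key)
cache.get("a")   # loads, not cached
cache.get("a")   # loads again, cached from now on
cache.get("a")   # served from the cache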
Example #7
 def get_lines(self, encoding=None, error_handling=None):
     # we cache the lines only the second time this is called, in
     # order to save memory when they are only used once
     key = (encoding, error_handling)
     try:
         lines = self._lines[key]
         if lines is not None:
             return lines
     except KeyError:
         pass
     f = Utils.open_source_file(
         self.filename,
         encoding=encoding,
         error_handling=error_handling,
         # newline normalisation is costly before Py2.6
         require_normalised_newlines=False)
     try:
         lines = list(f)
     finally:
         f.close()
     if key in self._lines:
         self._lines[key] = lines
     else:
         # do not cache the first access, but remember that we
         # already read it once
         self._lines[key] = None
     return lines
Example #8
 def visit_assignment(self, lhs, rhs):
     if (isinstance(rhs, ExprNodes.IntNode)
             and isinstance(lhs, ExprNodes.NameNode)
             and Utils.long_literal(rhs.value)):
         entry = lhs.entry or self.env.lookup(lhs.name)
         if entry:
             entry.might_overflow = True
Example #9
 def get_lines(self, encoding=None, error_handling=None):
     return Utils.open_source_file(
         self.filename,
         encoding=encoding,
         error_handling=error_handling,
         # newline normalisation is costly before Py2.6
         require_normalised_newlines=False)
Example #10
def parse_dependencies(source_filename):
    # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    fh = Utils.open_source_file(source_filename, "rU")
    try:
        source = fh.read()
    finally:
        fh.close()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    source = source.replace('\\\n', ' ')
    if '\t' in source:
        source = source.replace('\t', ' ')
    # TODO: pure mode
    dependancy = re.compile(
        r"(cimport +([0-9a-zA-Z_.]+)\b)|(from +([0-9a-zA-Z_.]+) +cimport)|(include +'([^']+)')|(cdef +extern +from +'([^']+)')"
    )
    cimports = []
    includes = []
    externs = []
    for m in dependancy.finditer(source):
        groups = m.groups()
        if groups[0]:
            cimports.append(groups[1])
        elif groups[2]:
            cimports.append(groups[3])
        elif groups[4]:
            includes.append(literals[groups[5]])
        else:
            externs.append(literals[groups[7]])
    return cimports, includes, externs, distutils_info
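A quick demo of what the regex extracts from a toy source string (the real function first strips string literals, so include/extern names go through the `literals` map; this demo skips that step):

import re

dependancy = re.compile(
    r"(cimport +([0-9a-zA-Z_.]+)\b)|(from +([0-9a-zA-Z_.]+) +cimport)|"
    r"(include +'([^']+)')|(cdef +extern +from +'([^']+)')")

sample = "cimport numpy\nfrom libc.math cimport sin\ninclude 'utils.pxi'\n"
for m in dependancy.finditer(sample):
    g = m.groups()
    print(g[1] or g[3] or g[5] or g[7])
# prints: numpy, then libc.math, then utils.pxi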
Example #11
 def visit_assignment(self, lhs, rhs):
     if (isinstance(rhs, ExprNodes.IntNode)
             and isinstance(lhs, ExprNodes.NameNode)
             and Utils.long_literal(rhs.value)):
         entry = lhs.entry or self.env.lookup(lhs.name)
         if entry:
             entry.might_overflow = True
Example #12
def parse_dependencies(source_filename):
    # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    source = Utils.open_source_file(source_filename, "rU").read()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    source = source.replace('\\\n', ' ')
    if '\t' in source:
        source = source.replace('\t', ' ')
    # TODO: pure mode
    dependancy = re.compile(r"(cimport +([0-9a-zA-Z_.]+)\b)|(from +([0-9a-zA-Z_.]+) +cimport)|(include +'([^']+)')|(cdef +extern +from +'([^']+)')")
    cimports = []
    includes = []
    externs  = []
    for m in dependancy.finditer(source):
        groups = m.groups()
        if groups[0]:
            cimports.append(groups[1])
        elif groups[2]:
            cimports.append(groups[3])
        elif groups[4]:
            includes.append(literals[groups[5]])
        else:
            externs.append(literals[groups[7]])
    return cimports, includes, externs, distutils_info
Example #13
 def setup_errors(self, options, result):
     if options.use_listing_file:
         result.listing_file = Utils.replace_suffix(source, ".lis")
         path = result.listing_file
     else:
         path = None
     Errors.open_listing_file(path=path, echo_to_stderr=options.errors_to_stderr)
Example #14
def parse_dependencies(source_filename):
    # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    fh = Utils.open_source_file(source_filename, "rU", error_handling='ignore')
    try:
        source = fh.read()
    finally:
        fh.close()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    source = source.replace('\\\n', ' ').replace('\t', ' ')

    # TODO: pure mode
    cimports = []
    includes = []
    externs = []
    for m in dependancy_regex.finditer(source):
        cimport_from, cimport, extern, include = m.groups()
        if cimport_from:
            cimports.append(cimport_from)
        elif cimport:
            cimports.append(cimport)
        elif extern:
            externs.append(literals[extern])
        else:
            includes.append(literals[include])
    return cimports, includes, externs, distutils_info
Example #15
 def parse(self, source_desc, scope, pxd, full_module_name):
     if not isinstance(source_desc, FileSourceDescriptor):
         raise RuntimeError("Only file sources for code supported")
     source_filename = source_desc.filename
     scope.cpp = self.cpp
     # Parse the given source file and return a parse tree.
     try:
         f = Utils.open_source_file(source_filename, "rU")
         try:
             import Parsing
             s = PyrexScanner(f,
                              source_desc,
                              source_encoding=f.encoding,
                              scope=scope,
                              context=self)
             tree = Parsing.p_module(s, pxd, full_module_name)
         finally:
             f.close()
     except UnicodeDecodeError, msg:
         #import traceback
         #traceback.print_exc()
         error((
             source_desc, 0, 0
         ), "Decoding error, missing or incorrect coding=<encoding-name> at top of source (%s)"
               % msg)
Example #16
 def get_int_const(self, str_value, longness=False):
     longness = bool(longness or Utils.long_literal(str_value))
     try:
         c = self.int_const_index[(str_value, longness)]
     except KeyError:
         c = self.new_int_const(str_value, longness)
     return c
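A hedged sketch of what the long-literal test might decide (the real Utils.long_literal may use different bounds); the point is that constants outside the plain C long range are interned and built differently:

def looks_like_long_literal(str_value):
    # Sketch only: values outside the 32-bit signed range count as "long".
    try:
        value = int(str_value, 0)
    except ValueError:
        return True
    return not -2**31 <= value < 2**31

print(looks_like_long_literal("42"))           # False
print(looks_like_long_literal("4294967296"))   # True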
Example #17
 def get_int_const(self, str_value, longness=False):
     longness = bool(longness or Utils.long_literal(str_value))
     try:
         c = self.int_const_index[(str_value, longness)]
     except KeyError:
         c = self.new_int_const(str_value, longness)
     return c
Example #18
 def __init__(self, filename, path_description=None):
     filename = Utils.decode_filename(filename)
     self.path_description = path_description or filename
     self.filename = filename
     self.set_file_type_from_name(filename)
     self._cmp_name = filename
     self._lines = {}
Example #19
def parse_dependencies(source_filename):
    # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    fh = Utils.open_source_file(source_filename, "rU", error_handling='ignore')
    try:
        source = fh.read()
    finally:
        fh.close()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    source = source.replace('\\\n', ' ').replace('\t', ' ')

    # TODO: pure mode
    cimports = []
    includes = []
    externs  = []
    for m in dependancy_regex.finditer(source):
        cimport_from, cimport, extern, include = m.groups()
        if cimport_from:
            cimports.append(cimport_from)
        elif cimport:
            cimports.append(cimport)
        elif extern:
            externs.append(literals[extern])
        else:
            includes.append(literals[include])
    return cimports, includes, externs, distutils_info
Example #20
 def setup_errors(self, options):
     if options.use_listing_file:
         result.listing_file = Utils.replace_suffix(source, ".lis")
         Errors.open_listing_file(result.listing_file,
             echo_to_stderr = options.errors_to_stderr)
     else:
         Errors.open_listing_file(None)
Example #21
 def is_package_dir(self, dir_path):
     #  Return true if the given directory is a package directory.
     for filename in ("__init__.py",
                      "__init__.pyx",
                      "__init__.pxd"):
         path = os.path.join(dir_path, filename)
         if Utils.path_exists(path):
             return 1
Example #22
File: Main.py Project: jpe/cython
 def is_package_dir(self, dir_path):
     #  Return true if the given directory is a package directory.
     for filename in ("__init__.py",
                      "__init__.pyx",
                      "__init__.pxd"):
         path = os.path.join(dir_path, filename)
         if Utils.path_exists(path):
             return 1
Example #23
 def get_lines(self, encoding=None, error_handling=None):
     return Utils.open_source_file(
         self.filename,
         encoding=encoding,
         error_handling=error_handling,
         # newline normalisation is costly before Py2.6
         require_normalised_newlines=False,
     )
Example #24
 def parse(self, source_desc, scope, pxd, full_module_name):
     if not isinstance(source_desc, FileSourceDescriptor):
         raise RuntimeError("Only file sources for code supported")
     source_filename = Utils.encode_filename(source_desc.filename)
     # Parse the given source file and return a parse tree.
     try:
         f = Utils.open_source_file(source_filename, "rU")
         try:
             s = PyrexScanner(f, source_desc, source_encoding = f.encoding,
                              scope = scope, context = self)
             tree = Parsing.p_module(s, pxd, full_module_name)
         finally:
             f.close()
     except UnicodeDecodeError, msg:
         #import traceback
         #traceback.print_exc()
         error((source_desc, 0, 0), "Decoding error, missing or incorrect coding=<encoding-name> at top of source (%s)" % msg)
Example #25
    def search_include_directories(self, qualified_name, suffix, pos,
                                   include=False, sys_path=False):
        # Search the list of include directories for the given
        # file name. If a source file position is given, first
        # searches the directory containing that file. Returns
        # None if not found, but does not report an error.
        # The 'include' option will disable package dereferencing.
        # If 'sys_path' is True, also search sys.path.
        dirs = self.include_directories
        if sys_path:
            dirs = dirs + sys.path
        if pos:
            file_desc = pos[0]
            if not isinstance(file_desc, FileSourceDescriptor):
                raise RuntimeError("Only file sources for code supported")
            if include:
                dirs = [os.path.dirname(file_desc.filename)] + dirs
            else:
                dirs = [self.find_root_package_dir(file_desc.filename)] + dirs

        dotted_filename = qualified_name
        if suffix:
            dotted_filename += suffix
        if not include:
            names = qualified_name.split('.')
            package_names = names[:-1]
            module_name = names[-1]
            module_filename = module_name + suffix
            package_filename = "__init__" + suffix

        for dir in dirs:
            path = os.path.join(dir, dotted_filename)
            if Utils.path_exists(path):
                return path
            if not include:
                package_dir = self.check_package_dir(dir, package_names)
                if package_dir is not None:
                    path = os.path.join(package_dir, module_filename)
                    if Utils.path_exists(path):
                        return path
                    path = os.path.join(dir, package_dir, module_name,
                                        package_filename)
                    if Utils.path_exists(path):
                        return path
        return None
Example #26
 def search_include_directories(self,
                                qualified_name,
                                suffix,
                                pos,
                                include=False,
                                sys_path=False):
     return Utils.search_include_directories(
         tuple(self.include_directories), qualified_name, suffix, pos,
         include, sys_path)
Example #27
 def read_dependency_file(self, source_path):
     dep_path = Utils.replace_suffix(source_path, ".dep")
     if os.path.exists(dep_path):
         f = open(dep_path, "rU")
         chunks = [line.strip().split(" ", 1) for line in f.readlines() if " " in line.strip()]
         f.close()
         return chunks
     else:
         return ()
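Each .dep line is expected to hold a kind and a name separated by a space; a minimal illustration of the split:

line = "cimport numpy\n"                 # hypothetical .dep entry
kind, name = line.strip().split(" ", 1)
print(kind, name)                        # cimport numpy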
Example #28
 def setup_errors(self, options, result):
     Errors.reset()  # clear any remaining error state
     if options.use_listing_file:
         result.listing_file = Utils.replace_suffix(source, ".lis")
         path = result.listing_file
     else:
         path = None
     Errors.open_listing_file(path=path,
                              echo_to_stderr=options.errors_to_stderr)
Example #29
 def read_dependency_file(self, source_path):
     dep_path = Utils.replace_suffix(source_path, ".dep")
     if os.path.exists(dep_path):
         f = open(dep_path, "rU")
         chunks = [ line.strip().split(" ", 1)
                    for line in f.readlines()
                    if " " in line.strip() ]
         f.close()
         return chunks
     else:
         return ()
Example #30
 def teardown_errors(self, err, options, result):
     source_desc = result.compilation_source.source_desc
     if not isinstance(source_desc, FileSourceDescriptor):
         raise RuntimeError("Only file sources for code supported")
     Errors.close_listing_file()
     result.num_errors = Errors.num_errors
     if result.num_errors > 0:
         err = True
     if err and result.c_file:
         try:
             Utils.castrate_file(result.c_file, os.stat(source_desc.filename))
         except EnvironmentError:
             pass
         result.c_file = None
     if result.c_file and not options.c_only and c_compile:
         result.object_file = c_compile(result.c_file,
             verbose_flag = options.show_version,
             cplus = options.cplus)
         if not options.obj_only and c_link:
             result.extension_file = c_link(result.object_file,
                 extra_objects = options.objects,
                 verbose_flag = options.show_version,
                 cplus = options.cplus)
Example #31
File: Main.py Project: jpe/cython
def create_default_resultobj(compilation_source, options):
    result = CompilationResult()
    result.main_source_file = compilation_source.source_desc.filename
    result.compilation_source = compilation_source
    source_desc = compilation_source.source_desc
    if options.output_file:
        result.c_file = os.path.join(compilation_source.cwd, options.output_file)
    else:
        if options.cplus:
            c_suffix = ".cpp"
        else:
            c_suffix = ".c"
        result.c_file = Utils.replace_suffix(source_desc.filename, c_suffix)
    return result
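Utils.replace_suffix presumably just swaps the file extension; an assumed equivalent, for illustration only:

import os

def replace_suffix_sketch(path, new_suffix):
    # Assumed behaviour of Utils.replace_suffix.
    return os.path.splitext(path)[0] + new_suffix

print(replace_suffix_sketch("pkg/module.pyx", ".cpp"))   # pkg/module.cpp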
Example #32
def create_default_resultobj(compilation_source, options):
    result = CompilationResult()
    result.main_source_file = compilation_source.source_desc.filename
    result.compilation_source = compilation_source
    source_desc = compilation_source.source_desc
    if options.output_file:
        result.c_file = os.path.join(compilation_source.cwd, options.output_file)
    else:
        if options.cplus:
            c_suffix = ".cpp"
        else:
            c_suffix = ".c"
        result.c_file = Utils.replace_suffix(source_desc.filename, c_suffix)
    return result
Example #33
    def parse(self, source_desc, scope, pxd, full_module_name):
        if not isinstance(source_desc, FileSourceDescriptor):
            raise RuntimeError("Only file sources for code supported")
        source_filename = source_desc.filename
        scope.cpp = self.cpp
        # Parse the given source file and return a parse tree.
        num_errors = Errors.num_errors
        try:
            f = Utils.open_source_file(source_filename, "rU")
            try:
                import Parsing
                s = PyrexScanner(f,
                                 source_desc,
                                 source_encoding=f.encoding,
                                 scope=scope,
                                 context=self)
                tree = Parsing.p_module(s, pxd, full_module_name)
            finally:
                f.close()
        except UnicodeDecodeError, e:
            #import traceback
            #traceback.print_exc()
            line = 1
            column = 0
            msg = e.args[-1]
            position = e.args[2]
            encoding = e.args[0]

            f = open(source_filename, "rb")
            try:
                byte_data = f.read()
            finally:
                f.close()

            # FIXME: make this at least a little less inefficient
            for idx, c in enumerate(byte_data):
                if c in (ord('\n'), '\n'):
                    line += 1
                    column = 0
                if idx == position:
                    break

                column += 1

            error(
                (source_desc, line, column),
                "Decoding error, missing or incorrect coding=<encoding-name> "
                "at top of source (cannot decode with encoding %r: %s)" %
                (encoding, msg))
Example #34
File: Code.py Project: dagss/cython
 def generate_int_constants(self):
     consts = [(len(c.value), c.value, c.is_long, c) for c in self.int_const_index.values()]
     consts.sort()
     decls_writer = self.parts["decls"]
     for _, value, longness, c in consts:
         cname = c.cname
         decls_writer.putln("static PyObject *%s;" % cname)
         if longness:
             function = '%s = PyLong_FromString((char *)"%s", 0, 0); %s;'
         elif Utils.long_literal(value):
             function = '%s = PyInt_FromString((char *)"%s", 0, 0); %s;'
         else:
             function = "%s = PyInt_FromLong(%s); %s;"
         init_globals = self.parts["init_globals"]
         init_globals.putln(function % (cname, value, init_globals.error_goto_if_null(cname, self.module_pos)))
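How one of the templates expands for a small constant (the names are invented and the error-goto part is elided):

cname, value = "__pyx_int_42", "42"      # hypothetical interned constant
function = "%s = PyInt_FromLong(%s); %s;"
print(function % (cname, value, "<error check>"))
# -> __pyx_int_42 = PyInt_FromLong(42); <error check>;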
Example #35
    def __init__(self, defaults=None, **kw):
        self.include_path = []
        if defaults:
            if isinstance(defaults, CompilationOptions):
                defaults = defaults.__dict__
        else:
            defaults = default_options

        options = dict(defaults)
        options.update(kw)

        # let's assume 'default_options' contains a value for most known compiler options
        # and validate against them
        unknown_options = set(options) - set(default_options)
        # ignore valid options that are not in the defaults
        unknown_options.difference_update(['include_path'])
        if unknown_options:
            message = "got unknown compilation option%s, please remove: %s" % (
                's' if len(unknown_options) > 1 else '',
                ', '.join(unknown_options))
            raise ValueError(message)

        directive_defaults = get_directive_defaults()
        directives = dict(options['compiler_directives'])  # copy mutable field
        # check for invalid directives
        unknown_directives = set(directives) - set(directive_defaults)
        if unknown_directives:
            message = "got unknown compiler directive%s: %s" % (
                's' if len(unknown_directives) > 1 else '',
                ', '.join(unknown_directives))
            raise ValueError(message)
        options['compiler_directives'] = directives
        if directives.get('np_pythran', False) and not options['cplus']:
            import warnings
            warnings.warn("C++ mode forced when in Pythran mode!")
            options['cplus'] = True
        if 'language_level' in directives and 'language_level' not in kw:
            options['language_level'] = directives['language_level']
        elif not options.get('language_level'):
            options['language_level'] = directive_defaults.get(
                'language_level')
        if 'formal_grammar' in directives and 'formal_grammar' not in kw:
            options['formal_grammar'] = directives['formal_grammar']
        if options['cache'] is True:
            options['cache'] = os.path.join(Utils.get_cython_cache_dir(),
                                            'compiler')

        self.__dict__.update(options)
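A usage sketch, assuming the CompilationOptions class above is already in scope and default_options is populated as the comments describe:

opts = CompilationOptions(cplus=True,
                          compiler_directives={'boundscheck': False})
try:
    CompilationOptions(not_an_option=1)
except ValueError as exc:
    print(exc)   # got unknown compilation option, please remove: not_an_option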
Example #36
    def parse(self, source_desc, scope, pxd, full_module_name):
        if not isinstance(source_desc, FileSourceDescriptor):
            raise RuntimeError("Only file sources for code supported")
        source_filename = source_desc.filename
        scope.cpp = self.cpp
        # Parse the given source file and return a parse tree.
        num_errors = Errors.num_errors
        try:
            f = Utils.open_source_file(source_filename, "rU")
            try:
                from . import Parsing
                s = PyrexScanner(f, source_desc, source_encoding = f.encoding,
                                 scope = scope, context = self)
                tree = Parsing.p_module(s, pxd, full_module_name)
            finally:
                f.close()
        except UnicodeDecodeError as e:
            #import traceback
            #traceback.print_exc()
            line = 1
            column = 0
            msg = e.args[-1]
            position = e.args[2]
            encoding = e.args[0]

            f = open(source_filename, "rb")
            try:
                byte_data = f.read()
            finally:
                f.close()

            # FIXME: make this at least a little less inefficient
            for idx, c in enumerate(byte_data):
                if c in (ord('\n'), '\n'):
                    line += 1
                    column = 0
                if idx == position:
                    break

                column += 1

            error((source_desc, line, column),
                  "Decoding error, missing or incorrect coding=<encoding-name> "
                  "at top of source (cannot decode with encoding %r: %s)" % (encoding, msg))

        if Errors.num_errors > num_errors:
            raise CompileError()
        return tree
Example #37
    def __init__(self, defaults=None, **kw):
        self.include_path = []
        if defaults:
            if isinstance(defaults, CompilationOptions):
                defaults = defaults.__dict__
        else:
            defaults = default_options

        options = dict(defaults)
        options.update(kw)

        # let's assume 'default_options' contains a value for most known compiler options
        # and validate against them
        unknown_options = set(options) - set(default_options)
        # ignore valid options that are not in the defaults
        unknown_options.difference_update(['include_path'])
        if unknown_options:
            message = "got unknown compilation option%s, please remove: %s" % (
                's' if len(unknown_options) > 1 else '',
                ', '.join(unknown_options))
            raise ValueError(message)

        directive_defaults = get_directive_defaults()
        directives = dict(options['compiler_directives'])  # copy mutable field
        # check for invalid directives
        unknown_directives = set(directives) - set(directive_defaults)
        if unknown_directives:
            message = "got unknown compiler directive%s: %s" % (
                's' if len(unknown_directives) > 1 else '',
                ', '.join(unknown_directives))
            raise ValueError(message)
        options['compiler_directives'] = directives
        if directives.get('np_pythran', False) and not options['cplus']:
            import warnings
            warnings.warn("C++ mode forced when in Pythran mode!")
            options['cplus'] = True
        if 'language_level' in directives and 'language_level' not in kw:
            options['language_level'] = directives['language_level']
        elif not options.get('language_level'):
            options['language_level'] = directive_defaults.get('language_level')
        if 'formal_grammar' in directives and 'formal_grammar' not in kw:
            options['formal_grammar'] = directives['formal_grammar']
        if options['cache'] is True:
            options['cache'] = os.path.join(Utils.get_cython_cache_dir(), 'compiler')

        self.__dict__.update(options)
Example #38
File: Main.py Project: jpe/cython
 def c_file_out_of_date(self, source_path):
     c_path = Utils.replace_suffix(source_path, ".c")
     if not os.path.exists(c_path):
         return 1
     c_time = Utils.modification_time(c_path)
     if Utils.file_newer_than(source_path, c_time):
         return 1
     pos = [source_path]
     pxd_path = Utils.replace_suffix(source_path, ".pxd")
     if os.path.exists(pxd_path) and Utils.file_newer_than(pxd_path, c_time):
         return 1
     for kind, name in self.read_dependency_file(source_path):
         if kind == "cimport":
             dep_path = self.find_pxd_file(name, pos)
         elif kind == "include":
             dep_path = self.search_include_directories(name, pos)
         else:
             continue
         if dep_path and Utils.file_newer_than(dep_path, c_time):
             return 1
     return 0
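The timestamp helpers this check relies on presumably wrap os.stat; assumed equivalents, for reference only:

import os

def modification_time_sketch(path):
    # Assumed behaviour of Utils.modification_time.
    return os.path.getmtime(path)

def file_newer_than_sketch(path, time):
    # Assumed behaviour of Utils.file_newer_than.
    return os.path.exists(path) and os.path.getmtime(path) > time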
Example #39
 def c_file_out_of_date(self, source_path):
     c_path = Utils.replace_suffix(source_path, ".c")
     if not os.path.exists(c_path):
         return 1
     c_time = Utils.modification_time(c_path)
     if Utils.file_newer_than(source_path, c_time):
         return 1
     pos = [source_path]
     pxd_path = Utils.replace_suffix(source_path, ".pxd")
     if os.path.exists(pxd_path) and Utils.file_newer_than(pxd_path, c_time):
         return 1
     for kind, name in self.read_dependency_file(source_path):
         if kind == "cimport":
             dep_path = self.find_pxd_file(name, pos)
         elif kind == "include":
             dep_path = self.search_include_directories(name, pos)
         else:
             continue
         if dep_path and Utils.file_newer_than(dep_path, c_time):
             return 1
     return 0
Example #40
def compile_cython_modules(profile=False, compile_more=False, cython_with_refnanny=False):
    source_root = os.path.abspath(os.path.dirname(__file__))
    compiled_modules = [
        "Cython.Plex.Scanners",
        "Cython.Plex.Actions",
        "Cython.Compiler.Lexicon",
        "Cython.Compiler.Scanning",
        "Cython.Compiler.Parsing",
        "Cython.Compiler.Visitor",
        "Cython.Compiler.FlowControl",
        "Cython.Compiler.Code",
        "Cython.Runtime.refnanny",
        # "Cython.Compiler.FusedNode",
    ]
    if compile_more:
        compiled_modules.extend(
            [
                "Cython.Build.Dependencies",
                "Cython.Compiler.ParseTreeTransforms",
                "Cython.Compiler.Nodes",
                "Cython.Compiler.ExprNodes",
                "Cython.Compiler.ModuleNode",
                "Cython.Compiler.Optimize",
            ]
        )

    defines = []
    if cython_with_refnanny:
        defines.append(("CYTHON_REFNANNY", "1"))

    extensions = []
    if sys.version_info[0] >= 3:
        from Cython.Distutils import build_ext as build_ext_orig

        for module in compiled_modules:
            source_file = os.path.join(source_root, *module.split("."))
            if os.path.exists(source_file + ".py"):
                pyx_source_file = source_file + ".py"
            else:
                pyx_source_file = source_file + ".pyx"
            dep_files = []
            if os.path.exists(source_file + ".pxd"):
                dep_files.append(source_file + ".pxd")
            if ".refnanny" in module:
                defines_for_module = []
            else:
                defines_for_module = defines
            extensions.append(
                Extension(module, sources=[pyx_source_file], define_macros=defines_for_module, depends=dep_files)
            )

        class build_ext(build_ext_orig):
            # we must keep the original modules alive to make sure
            # their code keeps working when we remove them from
            # sys.modules
            dead_modules = []

            def build_extensions(self):
                # add path where 2to3 installed the transformed sources
                # and make sure Python (re-)imports them from there
                already_imported = [
                    module for module in sys.modules if module == "Cython" or module.startswith("Cython.")
                ]
                keep_alive = self.dead_modules.append
                for module in already_imported:
                    keep_alive(sys.modules[module])
                    del sys.modules[module]
                sys.path.insert(0, os.path.join(source_root, self.build_lib))

                if profile:
                    from Cython.Compiler.Options import directive_defaults

                    directive_defaults["profile"] = True
                    print("Enabled profiling for the Cython binary modules")
                build_ext_orig.build_extensions(self)

        setup_args["ext_modules"] = extensions
        add_command_class("build_ext", build_ext)

    else:  # Python 2.x
        from distutils.command.build_ext import build_ext as build_ext_orig

        try:

            class build_ext(build_ext_orig):
                def build_extension(self, ext, *args, **kargs):
                    try:
                        build_ext_orig.build_extension(self, ext, *args, **kargs)
                    except StandardError:
                        print("Compilation of '%s' failed" % ext.sources[0])

            from Cython.Compiler.Main import compile
            from Cython import Utils

            if profile:
                from Cython.Compiler.Options import directive_defaults

                directive_defaults["profile"] = True
                print("Enabled profiling for the Cython binary modules")
            source_root = os.path.dirname(__file__)
            for module in compiled_modules:
                source_file = os.path.join(source_root, *module.split("."))
                if os.path.exists(source_file + ".py"):
                    pyx_source_file = source_file + ".py"
                else:
                    pyx_source_file = source_file + ".pyx"
                c_source_file = source_file + ".c"
                source_is_newer = False
                if not os.path.exists(c_source_file):
                    source_is_newer = True
                else:
                    c_last_modified = Utils.modification_time(c_source_file)
                    if Utils.file_newer_than(pyx_source_file, c_last_modified):
                        source_is_newer = True
                    else:
                        pxd_source_file = source_file + ".pxd"
                        if os.path.exists(pxd_source_file) and Utils.file_newer_than(pxd_source_file, c_last_modified):
                            source_is_newer = True
                if source_is_newer:
                    print("Compiling module %s ..." % module)
                    result = compile(pyx_source_file)
                    c_source_file = result.c_file
                if c_source_file:
                    # Py2 distutils can't handle unicode file paths
                    if isinstance(c_source_file, unicode):
                        filename_encoding = sys.getfilesystemencoding()
                        if filename_encoding is None:
                            filename_encoding = sys.getdefaultencoding()
                        c_source_file = c_source_file.encode(filename_encoding)
                    if ".refnanny" in module:
                        defines_for_module = []
                    else:
                        defines_for_module = defines
                    extensions.append(Extension(module, sources=[c_source_file], define_macros=defines_for_module))
                else:
                    print("Compilation failed")
            if extensions:
                setup_args["ext_modules"] = extensions
                add_command_class("build_ext", build_ext)
        except Exception:
            print(
                """
ERROR: %s

Extension module compilation failed, looks like Cython cannot run
properly on this system.  To work around this, pass the option
"--no-cython-compile".  This will install a pure Python version of
Cython without compiling its own sources.
"""
                % sys.exc_info()[1]
            )
            raise
Example #41
 def check_package_dir(self, dir, package_names):
     return Utils.check_package_dir(dir, tuple(package_names))
Example #42
    def save_annotation(self, source_filename, target_filename):
        self.mark_pos(None)
        f = Utils.open_source_file(source_filename)
        lines = f.readlines()
        for k, line in enumerate(lines):
            for c, cc, html in special_chars:
                line = line.replace(c, cc)
            lines[k] = line
        f.close()
        all = []
        if False:
            for pos, item in self.annotations:
                if pos[0].filename == source_filename:
                    start = item.start()
                    size, end = item.end()
                    if size:
                        all.append((pos, start))
                        all.append(
                            ((source_filename, pos[1], pos[2] + size), end))
                    else:
                        all.append((pos, start + end))

        all.sort(reverse=True)
        for pos, item in all:
            _, line_no, col = pos
            line_no -= 1
            col += 1
            line = lines[line_no]
            lines[line_no] = line[:col] + item + line[col:]

        html_filename = os.path.splitext(target_filename)[0] + ".html"
        f = codecs.open(html_filename, "w", encoding="UTF-8")
        f.write(u'<!DOCTYPE html>\n')
        f.write(u'<!-- Generated by Cython %s -->\n' % Version.watermark)
        f.write(u'<html>\n')
        f.write(u"""
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<style type="text/css">

body { font-family: courier; font-size: 12; }

.code  { font-size: 9; color: #444444; display: none; margin-left: 20px; }
.py_c_api  { color: red; }
.py_macro_api  { color: #FF7000; }
.pyx_c_api  { color: #FF3000; }
.pyx_macro_api  { color: #FF7000; }
.refnanny  { color: #FFA000; }

.error_goto  { color: #FFA000; }

.tag  {  }

.coerce  { color: #008000; border: 1px dotted #008000 }

.py_attr { color: #FF0000; font-weight: bold; }
.c_attr  { color: #0000FF; }

.py_call { color: #FF0000; font-weight: bold; }
.c_call  { color: #0000FF; }

.line { margin: 0em }

</style>
<script>
function toggleDiv(id) {
    theDiv = document.getElementById(id);
    if (theDiv.style.display != 'block') theDiv.style.display = 'block';
    else theDiv.style.display = 'none';
}
</script>
</head>
        """)
        f.write(u'<body>\n')
        f.write(u'<p>Generated by Cython %s\n' % Version.watermark)
        c_file = Utils.decode_filename(os.path.basename(target_filename))
        f.write(u'<p>Raw output: <a href="%s">%s</a>\n' % (c_file, c_file))

        zero_calls = dict(
            (name, 0) for name in
            'refnanny py_macro_api py_c_api pyx_macro_api pyx_c_api error_goto'
            .split())

        def annotate(match):
            group_name = match.lastgroup
            calls[group_name] += 1
            return ur"<span class='%s'>%s</span>" % (group_name,
                                                     match.group(group_name))

        pos_comment_marker = u'/* \N{HORIZONTAL ELLIPSIS} */\n'
        k = 0
        code_source_file = self.code.get(source_filename, {})
        for line in lines:
            k += 1
            try:
                code = code_source_file[k]
            except KeyError:
                code = ''
            else:
                code = _replace_pos_comment(pos_comment_marker, code)
                if code.startswith(pos_comment_marker):
                    code = code[len(pos_comment_marker):]
                code = html_escape(code)

            calls = zero_calls.copy()
            code = _parse_code(annotate, code)
            score = (5 * calls['py_c_api'] + 2 * calls['pyx_c_api'] +
                     calls['py_macro_api'] + calls['pyx_macro_api'])
            color = u"FFFF%02x" % int(255 / (1 + score / 10.0))
            f.write(
                u"<pre class='line' style='background-color: #%s' onclick='toggleDiv(\"line%s\")'>"
                % (color, k))

            f.write(u" %d: " % k)
            for c, cc, html in special_chars:
                line = line.replace(cc, html)
            f.write(line.rstrip())

            f.write(u'</pre>\n')
            f.write(
                u"<pre id='line%s' class='code' style='background-color: #%s'>%s</pre>"
                % (k, color, code))
        f.write(u'</body></html>\n')
        f.close()
Example #43
 def is_package_dir(self, dir_path):
     return Utils.is_package_dir(dir_path)
Example #44
 def check_package_dir(self, dir, package_names):
     return Utils.check_package_dir(dir, tuple(package_names))
Example #45
 def find_root_package_dir(self, file_path):
     return Utils.find_root_package_dir(file_path)
Example #46
 def search_include_directories(self, qualified_name, suffix, pos,
                                include=False, sys_path=False):
     return Utils.search_include_directories(
         tuple(self.include_directories), qualified_name, suffix, pos, include, sys_path)
Example #47
 def is_package_dir(self, dir_path):
     return Utils.is_package_dir(dir_path)
Example #48
 def get_lines(self, encoding=None, error_handling=None):
     if not encoding:
         return Utils.open_source_file(self.filename)
     else:
         return codecs.open(self.filename, "rU", encoding=encoding,
                            errors=error_handling)
Example #49
 def __init__(self, filename):
     filename = Utils.decode_filename(filename)
     self.filename = filename
     self.set_file_type_from_name(filename)
     self._cmp_name = filename
Example #50
def compile_cython_modules(profile=False,
                           compile_more=False,
                           cython_with_refnanny=False):
    source_root = os.path.abspath(os.path.dirname(__file__))
    compiled_modules = [
        "Cython.Plex.Scanners",
        "Cython.Plex.Actions",
        "Cython.Compiler.Lexicon",
        "Cython.Compiler.Scanning",
        "Cython.Compiler.Parsing",
        "Cython.Compiler.Visitor",
        "Cython.Compiler.FlowControl",
        "Cython.Compiler.Code",
        "Cython.Runtime.refnanny",
    ]
    if compile_more:
        compiled_modules.extend([
            "Cython.Compiler.ParseTreeTransforms",
            "Cython.Compiler.Nodes",
            "Cython.Compiler.ExprNodes",
            "Cython.Compiler.ModuleNode",
            "Cython.Compiler.Optimize",
        ])

    defines = []
    if cython_with_refnanny:
        defines.append(('CYTHON_REFNANNY', '1'))

    extensions = []
    if sys.version_info[0] >= 3:
        from Cython.Distutils import build_ext as build_ext_orig
        for module in compiled_modules:
            source_file = os.path.join(source_root, *module.split('.'))
            if os.path.exists(source_file + ".py"):
                pyx_source_file = source_file + ".py"
            else:
                pyx_source_file = source_file + ".pyx"
            dep_files = []
            if os.path.exists(source_file + '.pxd'):
                dep_files.append(source_file + '.pxd')
            if '.refnanny' in module:
                defines_for_module = []
            else:
                defines_for_module = defines
            extensions.append(
                Extension(module,
                          sources=[pyx_source_file],
                          define_macros=defines_for_module,
                          depends=dep_files))

        class build_ext(build_ext_orig):
            # we must keep the original modules alive to make sure
            # their code keeps working when we remove them from
            # sys.modules
            dead_modules = []

            def build_extensions(self):
                # add path where 2to3 installed the transformed sources
                # and make sure Python (re-)imports them from there
                already_imported = [
                    module for module in sys.modules
                    if module == 'Cython' or module.startswith('Cython.')
                ]
                keep_alive = self.dead_modules.append
                for module in already_imported:
                    keep_alive(sys.modules[module])
                    del sys.modules[module]
                sys.path.insert(0, os.path.join(source_root, self.build_lib))

                if profile:
                    from Cython.Compiler.Options import directive_defaults
                    directive_defaults['profile'] = True
                    print("Enabled profiling for the Cython binary modules")
                build_ext_orig.build_extensions(self)

        setup_args['ext_modules'] = extensions
        add_command_class("build_ext", build_ext)

    else:  # Python 2.x
        from distutils.command.build_ext import build_ext as build_ext_orig
        try:

            class build_ext(build_ext_orig):
                def build_extension(self, ext, *args, **kargs):
                    try:
                        build_ext_orig.build_extension(self, ext, *args,
                                                       **kargs)
                    except StandardError:
                        print("Compilation of '%s' failed" % ext.sources[0])

            from Cython.Compiler.Main import compile
            from Cython import Utils
            if profile:
                from Cython.Compiler.Options import directive_defaults
                directive_defaults['profile'] = True
                print("Enabled profiling for the Cython binary modules")
            source_root = os.path.dirname(__file__)
            for module in compiled_modules:
                source_file = os.path.join(source_root, *module.split('.'))
                if os.path.exists(source_file + ".py"):
                    pyx_source_file = source_file + ".py"
                else:
                    pyx_source_file = source_file + ".pyx"
                c_source_file = source_file + ".c"
                source_is_newer = False
                if not os.path.exists(c_source_file):
                    source_is_newer = True
                else:
                    c_last_modified = Utils.modification_time(c_source_file)
                    if Utils.file_newer_than(pyx_source_file, c_last_modified):
                        source_is_newer = True
                    else:
                        pxd_source_file = source_file + ".pxd"
                        if os.path.exists(
                                pxd_source_file) and Utils.file_newer_than(
                                    pxd_source_file, c_last_modified):
                            source_is_newer = True
                if source_is_newer:
                    print("Compiling module %s ..." % module)
                    result = compile(pyx_source_file)
                    c_source_file = result.c_file
                if c_source_file:
                    # Py2 distutils can't handle unicode file paths
                    if isinstance(c_source_file, unicode):
                        filename_encoding = sys.getfilesystemencoding()
                        if filename_encoding is None:
                            filename_encoding = sys.getdefaultencoding()
                        c_source_file = c_source_file.encode(filename_encoding)
                    if '.refnanny' in module:
                        defines_for_module = []
                    else:
                        defines_for_module = defines
                    extensions.append(
                        Extension(module,
                                  sources=[c_source_file],
                                  define_macros=defines_for_module))
                else:
                    print("Compilation failed")
            if extensions:
                setup_args['ext_modules'] = extensions
                add_command_class("build_ext", build_ext)
        except Exception:
            print('''
ERROR: %s

Extension module compilation failed, looks like Cython cannot run
properly on this system.  To work around this, pass the option
"--no-cython-compile".  This will install a pure Python version of
Cython without compiling its own sources.
''' % sys.exc_info()[1])
            raise
Example #51
    def save_annotation(self, source_filename, target_filename):
        self.mark_pos(None)
        f = Utils.open_source_file(source_filename)
        lines = f.readlines()
        for k in range(len(lines)):
            line = lines[k]
            for c, cc, html in special_chars:
                line = line.replace(c, cc)
            lines[k] = line
        f.close()
        all = []
        for pos, item in self.annotations:
            if pos[0] == source_filename:
                start = item.start()
                size, end = item.end()
                if size:
                    all.append((pos, start))
                    all.append(((source_filename, pos[1], pos[2] + size), end))
                else:
                    all.append((pos, start + end))

        all.sort()
        all.reverse()
        for pos, item in all:
            _, line_no, col = pos
            line_no -= 1
            col += 1
            line = lines[line_no]
            lines[line_no] = line[:col] + item + line[col:]

        html_filename = os.path.splitext(target_filename)[0] + ".html"
        f = codecs.open(html_filename, "w", encoding="UTF-8")
        f.write(u'<html>\n')
        f.write(u"""
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<style type="text/css">

body { font-family: courier; font-size: 12; }

.code  { font-size: 9; color: #444444; display: none; margin-left: 20px; }
.py_api  { color: red; }
.pyx_api  { color: #FF3000; }
.py_macro_api  { color: #FF8000; }
.error_goto  { color: #FF8000; }

.tag  {  }

.coerce  { color: #008000; border: 1px dotted #008000 }

.py_attr { color: #FF0000; font-weight: bold; }
.c_attr  { color: #0000FF; }

.py_call { color: #FF0000; font-weight: bold; }
.c_call  { color: #0000FF; }

.line { margin: 0em }

</style>
<script>
function toggleDiv(id) {
    theDiv = document.getElementById(id);
    if (theDiv.style.display == 'none') theDiv.style.display = 'block';
    else theDiv.style.display = 'none';
}
</script>
</head>
        """)
        f.write(u'<body>\n')
        f.write(u'<p>Generated by Cython %s on %s\n' %
                (Version.version, time.asctime()))
        c_file = Utils.encode_filename(os.path.basename(target_filename))
        f.write(u'<p>Raw output: <a href="%s">%s</a>\n' % (c_file, c_file))
        k = 0

        py_c_api = re.compile(u'(Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]+)')
        pyx_api = re.compile(u'(__Pyx[A-Za-z_]+)\(')
        py_marco_api = re.compile(u'(Py[A-Za-z]*_[A-Z][A-Z_]+)')
        error_goto = re.compile(
            ur'((; *if .*)? \{__pyx_filename = .*goto __pyx_L\w+;\})')

        for line in lines:

            k += 1
            try:
                code = self.code[k]
            except KeyError:
                code = ''

            code, c_api_calls = py_c_api.subn(
                ur"<span class='py_api'>\1</span>", code)
            code, pyx_api_calls = pyx_api.subn(
                ur"<span class='pyx_api'>\1</span>(", code)
            code, macro_api_calls = py_marco_api.subn(
                ur"<span class='py_macro_api'>\1</span>", code)
            code, error_goto_calls = error_goto.subn(
                ur"<span class='error_goto'>\1</span>", code)

            code = code.replace(u"<span class='error_goto'>;",
                                u";<span class='error_goto'>")

            color = u"FFFF%02x" % int(
                255 /
                (1 + (5 * c_api_calls + 2 * pyx_api_calls + macro_api_calls) /
                 10.0))
            f.write(
                u"<pre class='line' style='background-color: #%s' onclick='toggleDiv(\"line%s\")'>"
                % (color, k))

            f.write(u" %d: " % k)
            for c, cc, html in special_chars:
                line = line.replace(cc, html)
            f.write(line.rstrip())

            f.write(u'</pre>\n')
            code = re.sub(line_pos_comment, '',
                          code)  # inline annotations are redundant
            f.write(
                u"<pre id='line%s' class='code' style='background-color: #%s'>%s</pre>"
                % (k, color, code))
        f.write(u'</body></html>\n')
        f.close()
Example #52
    def infer_types(self, scope):
        enabled = scope.directives['infer_types']
        verbose = scope.directives['infer_types.verbose']

        if enabled == True:
            spanning_type = aggressive_spanning_type
        elif enabled is None:  # safe mode
            spanning_type = safe_spanning_type
        else:
            for entry in scope.entries.values():
                if entry.type is unspecified_type:
                    entry.type = py_object_type
            return

        dependancies_by_entry = {}  # entry -> dependancies
        entries_by_dependancy = {}  # dependancy -> entries
        ready_to_infer = []
        for name, entry in scope.entries.items():
            if entry.type is unspecified_type:
                if entry.in_closure or entry.from_closure:
                    # cross-closure type inference is not currently supported
                    entry.type = py_object_type
                    continue
                all = set()
                for assmt in entry.cf_assignments:
                    all.update(assmt.type_dependencies(scope))
                if all:
                    dependancies_by_entry[entry] = all
                    for dep in all:
                        if dep not in entries_by_dependancy:
                            entries_by_dependancy[dep] = set([entry])
                        else:
                            entries_by_dependancy[dep].add(entry)
                else:
                    ready_to_infer.append(entry)

        def resolve_dependancy(dep):
            if dep in entries_by_dependancy:
                for entry in entries_by_dependancy[dep]:
                    entry_deps = dependancies_by_entry[entry]
                    entry_deps.remove(dep)
                    if not entry_deps and entry != dep:
                        del dependancies_by_entry[entry]
                        ready_to_infer.append(entry)

        # Try to infer things in order...
        while True:
            while ready_to_infer:
                entry = ready_to_infer.pop()
                types = [
                    assmt.rhs.infer_type(scope)
                    for assmt in entry.cf_assignments
                ]
                if types and Utils.all(types):
                    entry.type = spanning_type(types, entry.might_overflow)
                else:
                    # FIXME: raise a warning?
                    # print "No assignments", entry.pos, entry
                    entry.type = py_object_type
                if verbose:
                    message(
                        entry.pos, "inferred '%s' to be of type '%s'" %
                        (entry.name, entry.type))
                resolve_dependancy(entry)
            # Deal with simple circular dependencies...
            for entry, deps in dependancies_by_entry.items():
                if len(deps) == 1 and deps == set([entry]):
                    types = [
                        assmt.infer_type(scope)
                        for assmt in entry.cf_assignments
                        if assmt.type_dependencies(scope) == ()
                    ]
                    if types:
                        entry.type = spanning_type(types, entry.might_overflow)
                        types = [
                            assmt.infer_type(scope)
                            for assmt in entry.cf_assignments
                        ]
                        entry.type = spanning_type(
                            types, entry.might_overflow)  # might be wider...
                        resolve_dependancy(entry)
                        del dependancies_by_entry[entry]
                        if ready_to_infer:
                            break
            if not ready_to_infer:
                break

        # We can't figure out the rest with this algorithm; let them be objects.
        for entry in dependancies_by_entry:
            entry.type = py_object_type
            if verbose:
                message(
                    entry.pos, "inferred '%s' to be of type '%s' (default)" %
                    (entry.name, entry.type))
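The ready_to_infer / resolve_dependancy machinery above is essentially a dependency-ordered worklist: entries with no unresolved type dependencies are inferred first, and each resolution may unlock further entries. A toy sketch of that scheme, with plain strings standing in for symtab entries (the names and dependencies are invented for illustration):

deps_by_name = {'a': set(), 'b': {'a'}, 'c': {'a', 'b'}}   # who each name depends on
ready = [name for name, deps in deps_by_name.items() if not deps]
order = []
while ready:
    name = ready.pop()
    order.append(name)                      # "infer" this name
    for other, deps in deps_by_name.items():
        deps.discard(name)                  # its dependants have one dependency fewer
        if not deps and other not in order and other not in ready:
            ready.append(other)
print(order)   # ['a', 'b', 'c'] -- every name is inferred after its dependencies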
Ejemplo n.º 53
0
 def __init__(self, outfile_name):
     self.f = Utils.open_new_file(outfile_name)
     self.level = 0
Ejemplo n.º 54
0
    def save_annotation(self, source_filename, target_filename):
        self.mark_pos(None)
        f = Utils.open_source_file(source_filename)
        lines = f.readlines()
        for k in range(len(lines)):
            line = lines[k]
            for c, cc, html in special_chars:
                line = line.replace(c, cc)
            lines[k] = line
        f.close()
        all = []
        if False:  # disabled: position-based annotation markers are not inserted in this version
            for pos, item in self.annotations:
                if pos[0].filename == source_filename:
                    start = item.start()
                    size, end = item.end()
                    if size:
                        all.append((pos, start))
                        all.append(((source_filename, pos[1], pos[2]+size), end))
                    else:
                        all.append((pos, start+end))

        all.sort()
        all.reverse()
        for pos, item in all:
            _, line_no, col = pos
            line_no -= 1
            col += 1
            line = lines[line_no]
            lines[line_no] = line[:col] + item + line[col:]

        html_filename = os.path.splitext(target_filename)[0] + ".html"
        f = codecs.open(html_filename, "w", encoding="UTF-8")
        f.write(u'<html>\n')
        f.write(u"""
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<style type="text/css">

body { font-family: courier; font-size: 12; }

.code  { font-size: 9; color: #444444; display: none; margin-left: 20px; }
.py_c_api  { color: red; }
.py_macro_api  { color: #FF7000; }
.pyx_c_api  { color: #FF3000; }
.pyx_macro_api  { color: #FF7000; }
.refnanny  { color: #FFA000; }

.error_goto  { color: #FFA000; }

.tag  {  }

.coerce  { color: #008000; border: 1px dotted #008000 }

.py_attr { color: #FF0000; font-weight: bold; }
.c_attr  { color: #0000FF; }

.py_call { color: #FF0000; font-weight: bold; }
.c_call  { color: #0000FF; }

.line { margin: 0em }

</style>
<script>
function toggleDiv(id) {
    theDiv = document.getElementById(id);
    if (theDiv.style.display == 'none') theDiv.style.display = 'block';
    else theDiv.style.display = 'none';
}
</script>
</head>
        """)
        f.write(u'<body>\n')
        f.write(u'<p>Generated by Cython %s on %s\n' % (Version.version, time.asctime()))
        c_file = Utils.decode_filename(os.path.basename(target_filename))
        f.write(u'<p>Raw output: <a href="%s">%s</a>\n' % (c_file, c_file))
        k = 0

        py_c_api = re.compile(u'(Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]+)\(')
        py_marco_api = re.compile(u'(Py[A-Z][a-z]+_[A-Z][A-Z_]+)\(')
        pyx_c_api = re.compile(u'(__Pyx_[A-Z][a-z_][A-Za-z_]+)\(')
        pyx_macro_api = re.compile(u'(__Pyx_[A-Z][A-Z_]+)\(')
        error_goto = re.compile(ur'((; *if .*)? \{__pyx_filename = .*goto __pyx_L\w+;\})')
        refnanny = re.compile(u'(__Pyx_X?(GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)')

        code_source_file = self.code[source_filename]
        for line in lines:

            k += 1
            try:
                code = code_source_file[k]
            except KeyError:
                code = ''

            code = code.replace('<', '<code><</code>')

            code, py_c_api_calls = py_c_api.subn(ur"<span class='py_c_api'>\1</span>(", code)
            code, pyx_c_api_calls = pyx_c_api.subn(ur"<span class='pyx_c_api'>\1</span>(", code)
            code, py_macro_api_calls = py_marco_api.subn(ur"<span class='py_macro_api'>\1</span>(", code)
            code, pyx_macro_api_calls = pyx_macro_api.subn(ur"<span class='pyx_macro_api'>\1</span>(", code)
            code, refnanny_calls = refnanny.subn(ur"<span class='refnanny'>\1</span>", code)
            code, error_goto_calls = error_goto.subn(ur"<span class='error_goto'>\1</span>", code)

            code = code.replace(u"<span class='error_goto'>;", u";<span class='error_goto'>")

            score = 5*py_c_api_calls + 2*pyx_c_api_calls + py_macro_api_calls + pyx_macro_api_calls - refnanny_calls
            color = u"FFFF%02x" % int(255/(1+score/10.0))
            f.write(u"<pre class='line' style='background-color: #%s' onclick='toggleDiv(\"line%s\")'>" % (color, k))

            f.write(u" %d: " % k)
            for c, cc, html in special_chars:
                line = line.replace(cc, html)
            f.write(line.rstrip())

            f.write(u'</pre>\n')
            code = re.sub(line_pos_comment, '', code) # inline annotations are redundant
            f.write(u"<pre id='line%s' class='code' style='background-color: #%s'>%s</pre>" % (k, color, code))
        f.write(u'</body></html>\n')
        f.close()
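The highlighting in the loop above is plain re.subn: each regex captures the API name, re-emits it inside a span, and the substitution count feeds the line score. A self-contained sketch of that step on a made-up line of generated C (not real annotator output):

import re

py_c_api = re.compile(r'(Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]+)\(')
code = '__pyx_t_1 = PyObject_GetAttr(o, n); PyNumber_Add(a, b);'
code, n_calls = py_c_api.subn(r"<span class='py_c_api'>\1</span>(", code)
print(n_calls)   # 2 -- each such call later contributes 5 points to the line score
print(code)      # both call names wrapped in spans, '(' re-appended after each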
Ejemplo n.º 55
0
    def infer_types(self, scope):
        enabled = scope.directives['infer_types']
        verbose = scope.directives['infer_types.verbose']

        if enabled == True:
            spanning_type = aggressive_spanning_type
        elif enabled is None: # safe mode
            spanning_type = safe_spanning_type
        else:
            for entry in scope.entries.values():
                if entry.type is unspecified_type:
                    self.set_entry_type(entry, py_object_type)
            return

        # Set of assignments
        assignments = set([])
        assmts_resolved = set([])
        dependencies = {}
        assmt_to_names = {}

        for name, entry in scope.entries.items():
            for assmt in entry.cf_assignments:
                names = assmt.type_dependencies()
                assmt_to_names[assmt] = names
                assmts = set()
                for node in names:
                    assmts.update(node.cf_state)
                dependencies[assmt] = assmts
            if entry.type is unspecified_type:
                assignments.update(entry.cf_assignments)
            else:
                assmts_resolved.update(entry.cf_assignments)

        def infer_name_node_type(node):
            types = [assmt.inferred_type for assmt in node.cf_state]
            if not types:
                node_type = py_object_type
            else:
                node_type = spanning_type(
                    types, entry.might_overflow, entry.pos)
            node.inferred_type = node_type

        def infer_name_node_type_partial(node):
            types = [assmt.inferred_type for assmt in node.cf_state
                     if assmt.inferred_type is not None]
            if not types:
                return
            return spanning_type(types, entry.might_overflow, entry.pos)

        def resolve_assignments(assignments):
            resolved = set()
            for assmt in assignments:
                deps = dependencies[assmt]
                # all of this assignment's dependencies have been resolved
                if assmts_resolved.issuperset(deps):
                    for node in assmt_to_names[assmt]:
                        infer_name_node_type(node)
                    # Resolve assmt
                    assmt.infer_type()
                    assmts_resolved.add(assmt)
                    resolved.add(assmt)
            assignments -= resolved
            return resolved

        def partial_infer(assmt):
            partial_types = []
            for node in assmt_to_names[assmt]:
                partial_type = infer_name_node_type_partial(node)
                if partial_type is None:
                    return False
                partial_types.append((node, partial_type))
            for node, partial_type in partial_types:
                node.inferred_type = partial_type
            assmt.infer_type()
            return True

        partial_assmts = set()
        def resolve_partial(assignments):
            # try to handle circular references
            partials = set()
            for assmt in assignments:
                if assmt in partial_assmts:
                    continue
                if partial_infer(assmt):
                    partials.add(assmt)
                    assmts_resolved.add(assmt)
            partial_assmts.update(partials)
            return partials

        # Infer assignments
        while True:
            if not resolve_assignments(assignments):
                if not resolve_partial(assignments):
                    break
        inferred = set()
        # First pass
        for entry in scope.entries.values():
            if entry.type is not unspecified_type:
                continue
            entry_type = py_object_type
            if assmts_resolved.issuperset(entry.cf_assignments):
                types = [assmt.inferred_type for assmt in entry.cf_assignments]
                if types and Utils.all(types):
                    entry_type = spanning_type(
                        types, entry.might_overflow, entry.pos)
                    inferred.add(entry)
            self.set_entry_type(entry, entry_type)

        def reinfer():
            dirty = False
            for entry in inferred:
                types = [assmt.infer_type()
                         for assmt in entry.cf_assignments]
                new_type = spanning_type(types, entry.might_overflow, entry.pos)
                if new_type != entry.type:
                    self.set_entry_type(entry, new_type)
                    dirty = True
            return dirty

        # propagate types until nothing changes any more
        while reinfer():
            pass

        if verbose:
            for entry in inferred:
                message(entry.pos, "inferred '%s' to be of type '%s'" % (
                    entry.name, entry.type))
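The reinfer() loop above is a fixed-point iteration: spanning types are recomputed until a complete pass changes nothing. A toy version of that pattern, with dictionaries standing in for entries and a deliberately simplified spanning rule (none of these names or types come from Cython itself):

assignments = {'x': ['int'], 'y': ['x', 'float']}    # y is assigned from x and from a float
types = {'x': 'int', 'y': 'int'}                     # optimistic first-pass guesses

def spanning(ts):
    # crude stand-in for spanning_type(): int and float widen to float
    return 'float' if 'float' in list(ts) else 'int'

def reinfer():
    dirty = False
    for name, rhs in assignments.items():
        new_type = spanning(types.get(r, r) for r in rhs)   # map names to their current types
        if new_type != types[name]:
            types[name] = new_type
            dirty = True
    return dirty

while reinfer():                                      # iterate until stable
    pass
print(types)   # {'x': 'int', 'y': 'float'}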
Ejemplo n.º 56
0
 def find_root_package_dir(self, file_path):
     return Utils.find_root_package_dir(file_path)
Ejemplo n.º 57
0
def compile_cython_modules(profile=False):
    source_root = os.path.abspath(os.path.dirname(__file__))
    compiled_modules = ["Cython.Plex.Scanners",
                        "Cython.Compiler.Scanning",
                        "Cython.Compiler.Parsing",
                        "Cython.Compiler.Visitor",
                        "Cython.Runtime.refnanny"]
    extensions = []

    if sys.version_info[0] >= 3:
        from Cython.Distutils import build_ext as build_ext_orig
        for module in compiled_modules:
            source_file = os.path.join(source_root, *module.split('.'))
            if os.path.exists(source_file + ".py"):
                pyx_source_file = source_file + ".py"
            else:
                pyx_source_file = source_file + ".pyx"
            extensions.append(
                Extension(module, sources = [pyx_source_file])
                )

        class build_ext(build_ext_orig):
            def build_extensions(self):
                # add path where 2to3 installed the transformed sources
                # and make sure Python (re-)imports them from there
                already_imported = [ module for module in sys.modules
                                     if module == 'Cython' or module.startswith('Cython.') ]
                for module in already_imported:
                    del sys.modules[module]
                sys.path.insert(0, os.path.join(source_root, self.build_lib))

                if profile:
                    from Cython.Compiler.Options import directive_defaults
                    directive_defaults['profile'] = True
                    print("Enabled profiling for the Cython binary modules")
                build_ext_orig.build_extensions(self)

        setup_args['ext_modules'] = extensions
        add_command_class("build_ext", build_ext)

    else: # Python 2.x
        from distutils.command.build_ext import build_ext as build_ext_orig
        try:
            class build_ext(build_ext_orig):
                def build_extension(self, ext, *args, **kargs):
                    try:
                        build_ext_orig.build_extension(self, ext, *args, **kargs)
                    except StandardError:
                        print("Compilation of '%s' failed" % ext.sources[0])
            from Cython.Compiler.Main import compile
            from Cython import Utils
            if profile:
                from Cython.Compiler.Options import directive_defaults
                directive_defaults['profile'] = True
                print("Enabled profiling for the Cython binary modules")
            source_root = os.path.dirname(__file__)
            for module in compiled_modules:
                source_file = os.path.join(source_root, *module.split('.'))
                if os.path.exists(source_file + ".py"):
                    pyx_source_file = source_file + ".py"
                else:
                    pyx_source_file = source_file + ".pyx"
                c_source_file = source_file + ".c"
                if not os.path.exists(c_source_file) or \
                   Utils.file_newer_than(pyx_source_file,
                                         Utils.modification_time(c_source_file)):
                    print("Compiling module %s ..." % module)
                    result = compile(pyx_source_file)
                    c_source_file = result.c_file
                if c_source_file:
                    # Py2 distutils can't handle unicode file paths
                    if isinstance(c_source_file, unicode):
                        filename_encoding = sys.getfilesystemencoding()
                        if filename_encoding is None:
                            filename_encoding = sys.getdefaultencoding()
                        c_source_file = c_source_file.encode(filename_encoding)
                    extensions.append(
                        Extension(module, sources = [c_source_file])
                        )
                else:
                    print("Compilation failed")
            if extensions:
                setup_args['ext_modules'] = extensions
                add_command_class("build_ext", build_ext)
        except Exception:
            print('''
ERROR: %s

Extension module compilation failed, looks like Cython cannot run
properly on this system.  To work around this, pass the option
"--no-cython-compile".  This will install a pure Python version of
Cython without compiling its own sources.
''' % sys.exc_info()[1])
            raise
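For context, a hedged sketch of how a setup.py might drive the function above; the exact flag handling in Cython's real setup.py may differ, and '--cython-profile' is assumed here rather than taken from this excerpt (only "--no-cython-compile" is mentioned in it):

import sys

cython_profile = '--cython-profile' in sys.argv
if cython_profile:
    sys.argv.remove('--cython-profile')

try:
    sys.argv.remove('--no-cython-compile')   # pure-Python install requested
except ValueError:
    compile_cython_modules(profile=cython_profile)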
Ejemplo n.º 58
0
 def get_error_description(self):
     path = self.filename
     cwd = Utils.decode_filename(os.getcwd() + os.path.sep)
     if path.startswith(cwd):
         return path[len(cwd):]
     return path
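The snippet above simply reports paths relative to the current working directory when possible; a tiny illustration of that behaviour with hypothetical paths:

import os

cwd = os.getcwd() + os.path.sep
path = os.path.join(os.getcwd(), 'pkg', 'mod.pyx')          # hypothetical error location
print(path[len(cwd):] if path.startswith(cwd) else path)    # -> pkg/mod.pyx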