Example #1
    def handle_display_options(self, option_order):
        """If there were any non-global "display-only" options
        (--help-commands or the metadata display options) on the command
        line, display the requested info and return true; else return
        false.
        """
        import sys

        if six.PY2 or self.help_commands:
            return _Distribution.handle_display_options(self, option_order)

        # Stdout may be StringIO (e.g. in tests)
        import io

        if not isinstance(sys.stdout, io.TextIOWrapper):
            return _Distribution.handle_display_options(self, option_order)

        # Don't wrap stdout if utf-8 is already the encoding. Provides
        #  workaround for #334.
        if sys.stdout.encoding.lower() in ("utf-8", "utf8"):
            return _Distribution.handle_display_options(self, option_order)

        # Print metadata in UTF-8 no matter the platform
        encoding = sys.stdout.encoding
        errors = sys.stdout.errors
        newline = sys.platform != "win32" and "\n" or None
        line_buffering = sys.stdout.line_buffering

        sys.stdout = io.TextIOWrapper(sys.stdout.detach(), "utf-8", errors, newline, line_buffering)
        try:
            return _Distribution.handle_display_options(self, option_order)
        finally:
            sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding, errors, newline, line_buffering)
Example #2
    def handle_display_options(self, option_order):
        """If there were any non-global "display-only" options
        (--help-commands or the metadata display options) on the command
        line, display the requested info and return true; else return
        false.
        """
        import sys

        if sys.version_info < (3,) or self.help_commands:
            return _Distribution.handle_display_options(self, option_order)

        # Stdout may be StringIO (e.g. in tests)
        import io
        if not isinstance(sys.stdout, io.TextIOWrapper):
            return _Distribution.handle_display_options(self, option_order)

        # Print metadata in UTF-8 no matter the platform
        encoding = sys.stdout.encoding
        errors = sys.stdout.errors
        newline = sys.platform != 'win32' and '\n' or None
        line_buffering = sys.stdout.line_buffering

        sys.stdout = io.TextIOWrapper(
            sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
        try:
            return _Distribution.handle_display_options(self, option_order)
        finally:
            sys.stdout = io.TextIOWrapper(
                sys.stdout.detach(), encoding, errors, newline, line_buffering)
Example #3
    def test_no_optimize_flag(self):
        # let's create a package that breaks bdist_rpm
        tmp_dir = self.mkdtemp()
        os.environ['HOME'] = tmp_dir   # to confine dir '.rpmdb' creation
        pkg_dir = os.path.join(tmp_dir, 'foo')
        os.mkdir(pkg_dir)
        self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
        self.write_file((pkg_dir, 'foo.py'), '#')
        self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
        self.write_file((pkg_dir, 'README'), '')

        dist = Distribution({'name': 'foo', 'version': '0.1',
                             'py_modules': ['foo'],
                             'url': 'xxx', 'author': 'xxx',
                             'author_email': 'xxx'})
        dist.script_name = 'setup.py'
        os.chdir(pkg_dir)

        sys.argv = ['setup.py']
        cmd = bdist_rpm(dist)
        cmd.fix_python = True

        cmd.quiet = 1
        cmd.ensure_finalized()
        cmd.run()

        dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
        self.assertIn('foo-0.1-1.noarch.rpm', dist_created)

        # bug #2945: upload ignores bdist_rpm files
        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)

        os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm'))
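The SETUP_PY fixture used by the bdist_rpm tests above is not shown in this listing; a plausible minimal stand-in (an assumption, not necessarily the verbatim fixture) would be:

SETUP_PY = """\
from distutils.core import setup
import foo

setup(name='foo', version='0.1', py_modules=['foo'],
      url='xxx', author='xxx', author_email='xxx')
"""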
Example #4
 def __init__(self, attrs):
     baseattrs = {}
     py2exeoptions = {}
     py2exeoptions["packages"] = ["translate", "encodings"]
     py2exeoptions["compressed"] = True
     py2exeoptions["excludes"] = [
         "PyLucene", "Tkconstants", "Tkinter", "tcl",
         "enchant",  # Need to do more to support spell checking on Windows
         # strange things unnecessarily included with some versions of pyenchant:
         "win32ui", "_win32sysloader", "win32pipe", "py2exe", "win32com",
         "pywin", "isapi", "_tkinter", "win32api",
     ]
     version = attrs.get("version", translateversion)
     py2exeoptions["dist_dir"] = "translate-toolkit-%s" % version
     py2exeoptions["includes"] = ["lxml", "lxml._elementpath", "psyco"]
     options = {"py2exe": py2exeoptions}
     baseattrs['options'] = options
     if py2exe:
         baseattrs['console'] = translatescripts
         baseattrs['zipfile'] = "translate.zip"
         baseattrs['cmdclass'] = {
             "py2exe": build_exe_map, "innosetup": build_installer
         }
         options["innosetup"] = py2exeoptions.copy()
         options["innosetup"]["install_script"] = []
     baseattrs.update(attrs)
     Distribution.__init__(self, baseattrs)
Example #5
    def test_home_installation_scheme(self):
        # This ensures two things:
        # - that --home generates the desired set of directory names
        # - that --home is supported on all platforms
        builddir = self.mkdtemp()
        destination = os.path.join(builddir, "installation")

        dist = Distribution({"name": "foopkg"})
        # script_name need not exist, it just needs to be initialized
        dist.script_name = os.path.join(builddir, "setup.py")
        dist.command_obj["build"] = support.DummyCommand(build_base=builddir, build_lib=os.path.join(builddir, "lib"))

        cmd = install(dist)
        cmd.home = destination
        cmd.ensure_finalized()

        self.assertEqual(cmd.install_base, destination)
        self.assertEqual(cmd.install_platbase, destination)

        def check_path(got, expected):
            got = os.path.normpath(got)
            expected = os.path.normpath(expected)
            self.assertEqual(got, expected)

        libdir = os.path.join(destination, "lib", "python")
        check_path(cmd.install_lib, libdir)
        check_path(cmd.install_platlib, libdir)
        check_path(cmd.install_purelib, libdir)
        check_path(cmd.install_headers, os.path.join(destination, "include", "python", "foopkg"))
        check_path(cmd.install_scripts, os.path.join(destination, "bin"))
        check_path(cmd.install_data, destination)
Example #6
    def get_build_scripts_cmd(self, target, scripts):
        import sys

        dist = Distribution()
        dist.scripts = scripts
        dist.command_obj["build"] = support.DummyCommand(build_scripts=target, force=1, executable=sys.executable)
        return build_scripts(dist)
Example #7
 def test_quiet(self):
     tmp_dir = self.mkdtemp()
     pkg_dir = os.path.join(tmp_dir, 'foo')
     os.mkdir(pkg_dir)
     self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
     self.write_file((pkg_dir, 'foo.py'), '#')
     self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
     self.write_file((pkg_dir, 'README'), '')
     dist = Distribution({'name': 'foo',
      'version': '0.1',
      'py_modules': ['foo'],
      'url': 'xxx',
      'author': 'xxx',
      'author_email': 'xxx'})
     dist.script_name = 'setup.py'
     os.chdir(pkg_dir)
     sys.argv = ['setup.py']
     cmd = bdist_rpm(dist)
     cmd.fix_python = True
     cmd.quiet = 1
     cmd.ensure_finalized()
     cmd.run()
     dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
     self.assertIn('foo-0.1-1.noarch.rpm', dist_created)
     self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
     self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)
Example #8
    def _try_compile_deployment_target(self, operator, target):
        orig_environ = os.environ
        os.environ = orig_environ.copy()
        self.addCleanup(setattr, os, 'environ', orig_environ)
        if target is None:
            if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
                del os.environ['MACOSX_DEPLOYMENT_TARGET']
        else:
            os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
        deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')
        with open(deptarget_c, 'w') as fp:
            fp.write(textwrap.dedent('''\
                #include <AvailabilityMacros.h>

                int dummy;

                #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED
                #else
                #error "Unexpected target"
                #endif

            ''' % operator))
        target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
        target = tuple(map(int, target.split('.')))
        target = '%02d%01d0' % target
        deptarget_ext = Extension('deptarget', [deptarget_c], extra_compile_args=['-DTARGET=%s' % (target,)])
        dist = Distribution({'name': 'deptarget',
         'ext_modules': [deptarget_ext]})
        dist.package_dir = self.tmp_dir
        cmd = build_ext(dist)
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir
        try:
            cmd.ensure_finalized()
            cmd.run()
        except CompileError:
            self.fail('Wrong deployment target during compilation')

        return
Example #9
    def test_installation(self):
        source = self.mkdtemp()
        expected = []

        def write_script(name, text):
            expected.append(name)
            f = open(os.path.join(source, name), 'w')
            try:
                f.write(text)
            finally:
                f.close()

        write_script('script1.py', '#! /usr/bin/env python2.3\n# bogus script w/ Python sh-bang\npass\n')
        write_script('script2.py', '#!/usr/bin/python\n# bogus script w/ Python sh-bang\npass\n')
        write_script('shell.sh', '#!/bin/sh\n# bogus shell script w/ sh-bang\nexit 0\n')
        target = self.mkdtemp()
        dist = Distribution()
        dist.command_obj['build'] = support.DummyCommand(build_scripts=source)
        dist.command_obj['install'] = support.DummyCommand(install_scripts=target, force=1, skip_build=1)
        cmd = install_scripts(dist)
        cmd.finalize_options()
        cmd.run()
        installed = os.listdir(target)
        for name in expected:
            self.assertIn(name, installed)
Example #10
 def __init__(self, *args, **kwargs):
     Distribution.__init__(self, *args, **kwargs)
     self.cmdclass.setdefault("build_shortcuts", build_shortcuts)
     self.cmdclass.setdefault("update_icon_cache", update_icon_cache)
     self.cmdclass.setdefault("install_shortcuts", install_shortcuts)
     self.cmdclass.setdefault("build", build)
     self.cmdclass.setdefault("install", install)
Example #11
    def test_installation(self):
        source = self.mkdtemp()
        expected = []

        def write_script(name, text):
            expected.append(name)
            f = open(os.path.join(source, name), "w")
            f.write(text)
            f.close()

        write_script("script1.py", ("#! /usr/bin/env python2.3\n"
                                    "# bogus script w/ Python sh-bang\n"
                                    "pass\n"))
        write_script("script2.py", ("#!/usr/bin/python\n"
                                    "# bogus script w/ Python sh-bang\n"
                                    "pass\n"))
        write_script("shell.sh", ("#!/bin/sh\n"
                                  "# bogus shell script w/ sh-bang\n"
                                  "exit 0\n"))

        target = self.mkdtemp()
        dist = Distribution()
        dist.command_obj["build"] = support.DummyCommand(build_scripts=source)
        dist.command_obj["install"] = support.DummyCommand(
            install_scripts=target,
            force=1,
            skip_build=1,
            )
        cmd = install_scripts(dist)
        cmd.finalize_options()
        cmd.run()

        installed = os.listdir(target)
        for name in expected:
            self.assert_(name in installed)
Example #12
    def _get_build_extension(self, extension=None, lib_dir=None, temp_dir=None,
                             pgo_step_name=None, _build_ext=build_ext):
        self._clear_distutils_mkpath_cache()
        dist = Distribution()
        config_files = dist.find_config_files()
        try:
            config_files.remove('setup.cfg')
        except ValueError:
            pass
        dist.parse_config_files(config_files)

        if not temp_dir:
            temp_dir = lib_dir
        add_pgo_flags = self._add_pgo_flags

        if pgo_step_name:
            base_build_ext = _build_ext
            class _build_ext(_build_ext):
                def build_extensions(self):
                    add_pgo_flags(self, pgo_step_name, temp_dir)
                    base_build_ext.build_extensions(self)

        build_extension = _build_ext(dist)
        build_extension.finalize_options()
        if temp_dir:
            temp_dir = py3compat.cast_bytes_py2(temp_dir, encoding=sys.getfilesystemencoding())
            build_extension.build_temp = temp_dir
        if lib_dir:
            lib_dir = py3compat.cast_bytes_py2(lib_dir, encoding=sys.getfilesystemencoding())
            build_extension.build_lib = lib_dir
        if extension is not None:
            build_extension.extensions = [extension]
        return build_extension
Example #13
    def test_build_ext(self):
        xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
        xx_ext = Extension('xx', [xx_c])
        dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
        dist.package_dir = self.tmp_dir
        cmd = build_ext(dist)
        if os.name == "nt":
            # On Windows, we must build a debug version iff running
            # a debug build of Python
            cmd.debug = sys.executable.endswith("_d.exe")
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir

        old_stdout = sys.stdout
        if not test_support.verbose:
            # silence compiler output
            sys.stdout = StringIO()
        try:
            cmd.ensure_finalized()
            cmd.run()
        finally:
            sys.stdout = old_stdout

        import xx

        for attr in ('error', 'foo', 'new', 'roj'):
            self.assert_(hasattr(xx, attr))

        self.assertEquals(xx.foo(2, 5), 7)
        self.assertEquals(xx.foo(13,15), 28)
        self.assertEquals(xx.new().demo(), None)
        doc = 'This is a template module just for instruction.'
        self.assertEquals(xx.__doc__, doc)
        self.assert_(isinstance(xx.Null(), xx.Null))
        self.assert_(isinstance(xx.Str(), xx.Str))
Example #14
    def finalize_options(self):
        self.cmdclass['config'] = config

        self.cmdclass['build'] = build
        self.cmdclass['build_py'] = build_py
        self.cmdclass['build_qk'] = build_qk
        self.cmdclass['build_ext'] = build_ext
        self.cmdclass['build_qext'] = build_qext

        self.cmdclass['install'] = install
        self.cmdclass['install_qlib'] = install_qlib
        self.cmdclass['install_qext'] = install_qext

        self.qhome = os.getenv('QHOME') or os.path.join(os.getenv('HOME'), 'q')
        u = os.uname()
        if u[0] == 'Linux':
            o = 'l'
        elif u[0] == 'SunOS':
            o = 'v' if u[-1] == 'i86pc' else 's'
        else:
            sys.stderr.write("Unknown platform: %s\n" % str(u))
            sys.exit(1)
        bits = 8 * get_config_var('SIZEOF_VOID_P')
        self.qarch = "%s%d" % (o, bits)
        self.install_data = os.path.join(self.qhome, self.qarch)
        self.kxver = self.get_kxver(self.qhome)
        self.qexecutable = os.path.join(self.qhome, self.qarch, 'q')
        _Distribution.finalize_options(self)
        for ext in self.ext_modules + self.qext_modules:
            ext.define_macros.append(('KXVER', self.kxver[0]))
            ext.define_macros.append(('QVER', self.kxver.replace('.', '_')))
            if sys.hexversion >= 0x3000000:
               ext.define_macros.append(('PY3K', '1'))
Example #15
 def __init__(self, attrs=None):
     have_package_data = hasattr(self, "package_data")
     if not have_package_data:
         self.package_data = {}
     _attrs_dict = attrs or {}
     if 'features' in _attrs_dict or 'require_features' in _attrs_dict:
         Feature.warn_deprecated()
     self.require_features = []
     self.features = {}
     self.dist_files = []
     self.src_root = attrs and attrs.pop("src_root", None)
     self.patch_missing_pkg_info(attrs)
     # Make sure we have any eggs needed to interpret 'attrs'
     if attrs is not None:
         self.dependency_links = attrs.pop('dependency_links', [])
         assert_string_list(self,'dependency_links',self.dependency_links)
     if attrs and 'setup_requires' in attrs:
         self.fetch_build_eggs(attrs.pop('setup_requires'))
     for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
         if not hasattr(self,ep.name):
             setattr(self,ep.name,None)
     _Distribution.__init__(self,attrs)
     if isinstance(self.metadata.version, numeric_types):
         # Some people apparently take "version number" too literally :)
         self.metadata.version = str(self.metadata.version)
Example #16
    def finalize_options(self):
        self.cmdclass['config'] = config

        self.cmdclass['build'] = build
        self.cmdclass['build_qk'] = build_qk
        self.cmdclass['build_ext'] = build_ext
        self.cmdclass['build_qext'] = build_qext

        self.cmdclass['install'] = install
        self.cmdclass['install_qlib'] = install_qlib
        self.cmdclass['install_qext'] = install_qext

        self.qhome = os.getenv('QHOME') or os.path.join(os.getenv('HOME'), 'q')
        u = os.uname()
        if u[0] == 'Linux':
            o = 'l'
        elif u[0] == 'SunOS':
            o = 'v' if u[-1] == 'i86pc' else 's'
        else:
            sys.stderr.write("Unknown platform: %s\n" % str(u))
            sys.exit(1)
        self.qarch = o+('32', '64')[sys.maxint > 2147483647]
        self.install_data = os.path.join(self.qhome, self.qarch)
        self.kxver = self.get_kxver(self.qhome)
        self.qexecutable = os.path.join(self.qhome, self.qarch, 'q')
        _Distribution.finalize_options(self)
        for ext in self.ext_modules + self.qext_modules:
            ext.define_macros.append(('KXVER', self.kxver[0]))
Example #17
 def __init__ (self, attrs=None):
   #super(Gist_dist,self).__init__(attrs)
   Distribution.__init__(self, attrs)
   self.cmdclass = {
     'build':          gist_build,    # override
     'patch':          patch_cmd,
     'mkconfig':       mkconfig_cmd,
   }
Example #18
 def __init__(self, attrs = None):
     # A list of (sconscripts, pre_hook, post_hook, src, parent_names)
     self.scons_data = []
     # A list of installable libraries
     self.installed_libraries = []
     # A dict of pkg_config files to generate/install
     self.installed_pkg_config = {}
     Distribution.__init__(self, attrs)
Example #19
 def __init__(self, *args):
     Distribution.__init__(self, *args)
     self.com_server = []
     self.services = []
     self.windows = [{'script': 'gogames-screensaver',
                      'icon_resources': [(1,'icons/icon.ico')]}]
     self.console = []
     self.zipfile = None
Example #20
    def _try_compile_deployment_target(self, operator, target):
        orig_environ = os.environ
        os.environ = orig_environ.copy()
        self.addCleanup(setattr, os, 'environ', orig_environ)

        if target is None:
            if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
                del os.environ['MACOSX_DEPLOYMENT_TARGET']
        else:
            os.environ['MACOSX_DEPLOYMENT_TARGET'] = target

        deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')

        with open(deptarget_c, 'w') as fp:
            fp.write(textwrap.dedent('''\
                #include <AvailabilityMacros.h>

                int dummy;

                #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED
                #else
                #error "Unexpected target"
                #endif

            ''' % operator))

        # get the deployment target that the interpreter was built with
        target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
        target = tuple(map(int, target.split('.')))
        target = '%02d%01d0' % target
        deptarget_ext = Extension(
            'deptarget',
            [deptarget_c],
            extra_compile_args=['-DTARGET=%s'%(target,)],
        )
        dist = Distribution({
            'name': 'deptarget',
            'ext_modules': [deptarget_ext]
        })
        dist.package_dir = self.tmp_dir
        cmd = build_ext(dist)
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir

        try:
            old_stdout = sys.stdout
            if not support.verbose:
                # silence compiler output
                sys.stdout = StringIO()
            try:
                cmd.ensure_finalized()
                cmd.run()
            finally:
                sys.stdout = old_stdout

        except CompileError:
            self.fail("Wrong deployment target during compilation")
Example #21
 def __init__(self, attrs=None):
     self.twisted_plugins = None
     Distribution.__init__(self, attrs)
     self.cmdclass = {'install': twisted_install,
                      'install_twisted_plugins': install_twisted_plugins,
                      'build': twisted_build,
                      'build_twisted_plugins': build_twisted_plugins,
                      'sdist': twisted_sdist,
     }
Example #22
 def test_get_sdist_filelist(self, sample_pkg):
     # we can get an sdist filelist
     dist = Distribution(SETUP_ATTRS)
     dist.script_name = "setup.py"
     cmd = sdist_check(dist)
     cmd.ensure_finalized()
     file_list = cmd._get_sdist_filelist()
     assert "sample_test/__init__.py" in file_list.files
     assert "setup.py" in file_list.files
Example #23
def _get_build_extension():
    dist = Distribution()
    # Ensure the build respects distutils configuration by parsing
    # the configuration files
    config_files = dist.find_config_files()
    dist.parse_config_files(config_files)
    build_extension = build_ext(dist)
    build_extension.finalize_options()
    return build_extension
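A sketch of how a helper like _get_build_extension is typically used to compile one extension in-process (the extension name and paths below are illustrative, not from the original source):

from distutils.extension import Extension

ext = Extension('demo', sources=['demo.c'])   # hypothetical C source
builder = _get_build_extension()              # options already finalized inside the helper
builder.extensions = [ext]
builder.build_temp = 'build/temp'             # assumed scratch directories
builder.build_lib = 'build/lib'
builder.run()                                 # compiles demo.c into build/lib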
Example #24
 def __init__(self, attrs):
     self.com_server = []
     self.ctypes_com_server = []
     self.service = ["cfnbootstrap.winhup"]
     self.isapi = []
     self.windows = []
     self.zipfile = 'library.zip'
     self.console = ['bin/cfn-init', 'bin/cfn-signal', 'bin/cfn-get-metadata', 'bin/cfn-hup', 'bin/cfn-elect-cmd-leader', 'bin/cfn-send-cmd-result']
     Distribution.__init__(self, attrs)
Example #25
    def _try_compile_deployment_target(self, operator, target):
        orig_environ = os.environ
        os.environ = orig_environ.copy()
        self.addCleanup(setattr, os, 'environ', orig_environ)

        if target is None:
            if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
                del os.environ['MACOSX_DEPLOYMENT_TARGET']
        else:
            os.environ['MACOSX_DEPLOYMENT_TARGET'] = target

        deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')

        with open(deptarget_c, 'w') as fp:
            fp.write(textwrap.dedent('''\
                #include <AvailabilityMacros.h>

                int dummy;

                #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED
                #else
                #error "Unexpected target"
                #endif

            ''' % operator))

        # get the deployment target that the interpreter was built with
        target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
        target = tuple(map(int, target.split('.')[0:2]))
        # format the target value as defined in the Apple
        # Availability Macros.  We can't use the macro names since
        # at least one value we test with will not exist yet.
        if target[1] < 10:
            # for 10.1 through 10.9.x -> "10n0"
            target = '%02d%01d0' % target
        else:
            # for 10.10 and beyond -> "10nn00"
            target = '%02d%02d00' % target
        deptarget_ext = Extension(
            'deptarget',
            [deptarget_c],
            extra_compile_args=['-DTARGET=%s'%(target,)],
        )
        dist = Distribution({
            'name': 'deptarget',
            'ext_modules': [deptarget_ext]
        })
        dist.package_dir = self.tmp_dir
        cmd = build_ext(dist)
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir

        try:
            cmd.ensure_finalized()
            cmd.run()
        except CompileError:
            self.fail("Wrong deployment target during compilation")
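The two formatting branches above encode the deployment target the way the Apple Availability Macros spell it; the same rule, pulled out into a small helper (hypothetical name, same behavior as the inline code), would read:

def availability_macro_value(target):
    # '10.9'  -> '1090'    (10.1 through 10.9.x)
    # '10.15' -> '101500'  (10.10 and beyond)
    major, minor = map(int, target.split('.')[0:2])
    if minor < 10:
        return '%02d%01d0' % (major, minor)
    return '%02d%02d00' % (major, minor)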
Example #26
 def __init__(self, attrs=None):
     self.twisted_plugins = None
     Distribution.__init__(self, attrs)
     self.cmdclass = {
         "install": twisted_install,
         "install_twisted_plugins": install_twisted_plugins,
         "build": twisted_build,
         "build_twisted_plugins": build_twisted_plugins,
         "sdist": twisted_sdist,
     }
Example #27
 def __init__(self, attrs):
     self.ctypes_com_server = []
     self.com_server = []
     self.services = []
     self.windows = [dict(script="luminotes.py", icon_resources=[(0, "static\\images\\luminotes.ico")])]
     self.console = []
     self.service = []
     self.isapi = []
     self.zipfile = "lib\\luminotes.zip"
     Distribution.__init__(self, attrs)
Example #28
    def finalize_options(self):
        _Distribution.finalize_options(self)
        if self.features:
            self._set_global_opts_from_features()

        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
            value = getattr(self,ep.name,None)
            if value is not None:
                ep.require(installer=self.fetch_build_egg)
                ep.load()(self, ep.name, value)
Example #29
    def finalize_options(self):
        Distribution.finalize_options(self)

        try:
            i = self.script_args.index('--disable-ext')
        except ValueError:
            self.disable_ext = False
        else:
            self.disable_ext = True
            self.script_args.pop(i)
Example #30
 def test_check_bad_filenames_filename(self, sample_pkg, capsys):
     # we do not get any output if there are no bad filenames.
     dist = Distribution(SETUP_ATTRS)
     dist.script_name = "setup.py"
     sample_pkg.join("MANIFEST.in").write("graft sample_test\n")
     cmd = sdist_check(dist)
     cmd.ensure_finalized()
     cmd.check_bad_filenames()
     out, err = capsys.readouterr()
     assert err == ""
Example #31
 def __init__(self, attrs=None):
     self.k_modules = None
     self.q_modules = None
     self.qext_modules = None
     _Distribution.__init__(self, attrs)
Example #32
 def test_get_source_files(self):
     modules = [Extension('foo', ['xxx'])]
     dist = Distribution({'name': 'xx', 'ext_modules': modules})
     cmd = build_ext(dist)
     cmd.ensure_finalized()
     self.assertEqual(cmd.get_source_files(), ['xxx'])
Example #33
 def __init__(self, *args, **kwargs):
     self.skip_libsumo = False
     Distribution.__init__(self, *args, **kwargs)
Example #34
def get_dist_class(name):
    # in case of setuptools this returns the extended commands
    return Distribution({}).get_command_class(name)
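A usage sketch for the helper above; which module the returned class comes from depends on whether setuptools has patched distutils (the printed value is illustrative):

sdist_cls = get_dist_class('sdist')
build_ext_cls = get_dist_class('build_ext')
print(sdist_cls.__module__)   # e.g. 'setuptools.command.sdist' or 'distutils.command.sdist'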
Example #35
 def distclass(**kwargs):
     return Distribution(kwargs)
Example #36
    def _try_compile_deployment_target(self, operator, target):
        orig_environ = os.environ
        os.environ = orig_environ.copy()
        self.addCleanup(setattr, os, 'environ', orig_environ)

        if target is None:
            if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
                del os.environ['MACOSX_DEPLOYMENT_TARGET']
        else:
            os.environ['MACOSX_DEPLOYMENT_TARGET'] = target

        deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')

        with open(deptarget_c, 'w') as fp:
            fp.write(
                textwrap.dedent('''\
                #include <AvailabilityMacros.h>

                int dummy;

                #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED
                #else
                #error "Unexpected target"
                #endif

            ''' % operator))

        # get the deployment target that the interpreter was built with
        target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
        target = tuple(map(int, target.split('.')[0:2]))
        # format the target value as defined in the Apple
        # Availability Macros.  We can't use the macro names since
        # at least one value we test with will not exist yet.
        if target[1] < 10:
            # for 10.1 through 10.9.x -> "10n0"
            target = '%02d%01d0' % target
        else:
            # for 10.10 and beyond -> "10nn00"
            target = '%02d%02d00' % target
        deptarget_ext = Extension(
            'deptarget',
            [deptarget_c],
            extra_compile_args=['-DTARGET=%s' % (target, )],
        )
        dist = Distribution({
            'name': 'deptarget',
            'ext_modules': [deptarget_ext]
        })
        dist.package_dir = self.tmp_dir
        cmd = build_ext(dist)
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir

        try:
            old_stdout = sys.stdout
            if not support.verbose:
                # silence compiler output
                sys.stdout = StringIO()
            try:
                cmd.ensure_finalized()
                cmd.run()
            finally:
                sys.stdout = old_stdout

        except CompileError:
            self.fail("Wrong deployment target during compilation")
Example #37
def _build(tmpdir, ext, hpy_devel, hpy_abi, compiler_verbose=0, debug=None):
    # XXX compact but horrible :-(
    from distutils.core import Distribution
    import distutils.errors
    import distutils.log
    #
    dist = Distribution()
    dist.parse_config_files()
    if debug is None:
        debug = sys.flags.debug
    options_build_ext = dist.get_option_dict('build_ext')
    options_build_ext['debug'] = ('ffiplatform', debug)
    options_build_ext['force'] = ('ffiplatform', True)
    options_build_ext['build_lib'] = ('ffiplatform', tmpdir)
    options_build_ext['build_temp'] = ('ffiplatform', tmpdir)
    options_build_py = dist.get_option_dict('build_py')
    options_build_py['build_lib'] = ('ffiplatform', tmpdir)

    # this is the equivalent of passing --hpy-abi from setup.py's command line
    dist.hpy_abi = hpy_abi
    dist.hpy_ext_modules = [ext]
    hpy_devel.fix_distribution(dist)

    old_level = distutils.log.set_threshold(0) or 0
    old_dir = os.getcwd()
    try:
        os.chdir(tmpdir)
        distutils.log.set_verbosity(compiler_verbose)
        dist.run_command('build_ext')
        cmd_obj = dist.get_command_obj('build_ext')
        outputs = cmd_obj.get_outputs()
        sonames = [
            x for x in outputs
            if not x.endswith(".py") and not x.endswith(".pyc")
        ]
        assert len(
            sonames) == 1, 'build_ext is not supposed to return multiple DLLs'
        soname = sonames[0]
    finally:
        os.chdir(old_dir)
        distutils.log.set_threshold(old_level)

    return soname
Example #38
"""


def print_box(msg):
    lines = msg.split('\n')
    size = max(len(l) + 1 for l in lines)
    print('-' * (size + 2))
    for l in lines:
        print('|{}{}|'.format(l, ' ' * (size - len(l))))
    print('-' * (size + 2))


if __name__ == '__main__':
    # Parse the command line and check the arguments
    # before we proceed with building deps and setup
    dist = Distribution()
    dist.script_name = sys.argv[0]
    dist.script_args = sys.argv[1:]
    try:
        ok = dist.parse_command_line()
    except DistutilsArgError as msg:
        raise SystemExit(
            core.gen_usage(dist.script_name) + "\nerror: %s" % msg)
    if not ok:
        sys.exit()

    if RUN_BUILD_DEPS:
        build_deps()

    extensions, cmdclass, packages, entry_points, extra_install_requires = configure_extension_build(
    )
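The pre-parse above lets setup.py inspect what the user asked for before doing any expensive work; a minimal sketch of the same idea (the command names checked here are an assumption, not from the original script) is:

from distutils.core import Distribution
import sys

dist = Distribution()
dist.script_name = sys.argv[0]
dist.script_args = sys.argv[1:]
dist.parse_command_line()

# dist.commands now holds the requested commands, e.g. ['build_ext', 'install'].
needs_native_build = any(c in dist.commands
                         for c in ('build', 'build_ext', 'install', 'bdist_wheel'))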
Example #39
    def test_get_outputs(self):
        tmp_dir = self.mkdtemp()
        c_file = os.path.join(tmp_dir, 'foo.c')
        self.write_file(c_file, 'void initfoo(void) {};\n')
        ext = Extension('foo', [c_file])
        dist = Distribution({'name': 'xx',
                             'ext_modules': [ext]})
        cmd = build_ext(dist)
        support.fixup_build_ext(cmd)
        cmd.ensure_finalized()
        self.assertEqual(len(cmd.get_outputs()), 1)

        cmd.build_lib = os.path.join(self.tmp_dir, 'build')
        cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')

        # issue #5977 : distutils build_ext.get_outputs
        # returns wrong result with --inplace
        other_tmp_dir = os.path.realpath(self.mkdtemp())
        old_wd = os.getcwd()
        os.chdir(other_tmp_dir)
        try:
            cmd.inplace = 1
            cmd.run()
            so_file = cmd.get_outputs()[0]
        finally:
            os.chdir(old_wd)
        self.assertTrue(os.path.exists(so_file))
        self.assertEqual(os.path.splitext(so_file)[-1],
                         sysconfig.get_config_var('SO'))
        so_dir = os.path.dirname(so_file)
        self.assertEqual(so_dir, other_tmp_dir)
        cmd.compiler = None
        cmd.inplace = 0
        cmd.run()
        so_file = cmd.get_outputs()[0]
        self.assertTrue(os.path.exists(so_file))
        self.assertEqual(os.path.splitext(so_file)[-1],
                         sysconfig.get_config_var('SO'))
        so_dir = os.path.dirname(so_file)
        self.assertEqual(so_dir, cmd.build_lib)

        # inplace = 0, cmd.package = 'bar'
        build_py = cmd.get_finalized_command('build_py')
        build_py.package_dir = {'': 'bar'}
        path = cmd.get_ext_fullpath('foo')
        # checking that the last directory is the build_dir
        path = os.path.split(path)[0]
        self.assertEqual(path, cmd.build_lib)

        # inplace = 1, cmd.package = 'bar'
        cmd.inplace = 1
        other_tmp_dir = os.path.realpath(self.mkdtemp())
        old_wd = os.getcwd()
        os.chdir(other_tmp_dir)
        try:
            path = cmd.get_ext_fullpath('foo')
        finally:
            os.chdir(old_wd)
        # checking that the last directory is bar
        path = os.path.split(path)[0]
        lastdir = os.path.split(path)[-1]
        self.assertEqual(lastdir, 'bar')
Example #40
    def cython(self, line, cell):
        """Compile and import everything from a Cython code cell.

        The contents of the cell are written to a `.pyx` file in the
        directory `IPYTHONDIR/cython` using a filename with the hash of the code.
        This file is then cythonized and compiled. The resulting module
        is imported and all of its symbols are injected into the user's
        namespace. The usage is similar to that of `%%cython_pyximport` but
        you don't have to pass a module name::

        %%cython
        def f(x):
            return 2.0*x
        """
        args = parse_argstring(self.cython, line)
        code = cell if cell.endswith('\n') else cell + '\n'
        lib_dir = os.path.join(self.shell.ipython_dir, 'cython')
        cython_include_dirs = ['.']
        force = args.force
        quiet = True
        ctx = Context(cython_include_dirs, default_options)
        key = code, sys.version_info, sys.executable, Cython.__version__
        module_name = "_cython_magic_" + hashlib.md5(
            str(key).encode('utf-8')).hexdigest()
        so_ext = [
            ext for ext, _, mod_type in imp.get_suffixes()
            if mod_type == imp.C_EXTENSION
        ][0]
        module_path = os.path.join(lib_dir, module_name + so_ext)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)

        if force or not os.path.isfile(module_path):
            cflags = []
            c_include_dirs = []
            if 'numpy' in code:
                import numpy
                c_include_dirs.append(numpy.get_include())
            pyx_file = os.path.join(lib_dir, module_name + '.pyx')
            pyx_file = py3compat.cast_bytes_py2(
                pyx_file, encoding=sys.getfilesystemencoding())
            with io.open(pyx_file, 'w', encoding='utf-8') as f:
                f.write(code)
            extension = Extension(name=module_name,
                                  sources=[pyx_file],
                                  include_dirs=c_include_dirs,
                                  extra_compile_args=cflags)
            dist = Distribution()
            config_files = dist.find_config_files()
            try:
                config_files.remove('setup.cfg')
            except ValueError:
                pass
            dist.parse_config_files(config_files)
            build_extension = build_ext(dist)
            build_extension.finalize_options()
            try:
                build_extension.extensions = cythonize([extension],
                                                       ctx=ctx,
                                                       quiet=quiet)
            except CompileError:
                return
            build_extension.build_temp = os.path.dirname(pyx_file)
            build_extension.build_lib = lib_dir
            build_extension.run()
            self._code_cache[key] = module_name

        module = imp.load_dynamic(module_name, module_path)
        self._import_all(module)
Example #41
def compile_c_extension(
    generated_source_path: str,
    build_dir: Optional[str] = None,
    verbose: bool = False,
    keep_asserts: bool = True,
) -> str:
    """Compile the generated source for a parser generator into an extension module.

    The extension module will be generated in the same directory as the provided path
    for the generated source, with the same basename (in addition to extension module
    metadata). For example, for the source mydir/parser.c the generated extension
    in a darwin system with python 3.8 will be mydir/parser.cpython-38-darwin.so.

    If *build_dir* is provided, that path will be used as the temporary build directory
    of distutils (this is useful in case you want to use a temporary directory).
    """
    import distutils.log
    from distutils.core import Distribution, Extension
    from distutils.command.clean import clean  # type: ignore
    from distutils.command.build_ext import build_ext  # type: ignore
    from distutils.tests.support import fixup_build_ext  # type: ignore

    if verbose:
        distutils.log.set_verbosity(distutils.log.DEBUG)

    source_file_path = pathlib.Path(generated_source_path)
    extension_name = source_file_path.stem
    extra_compile_args = get_extra_flags("CFLAGS", "PY_CFLAGS_NODIST")
    extra_compile_args.append("-DPy_BUILD_CORE_MODULE")
    # Define _Py_TEST_PEGEN to not call PyAST_Validate() in Parser/pegen.c
    extra_compile_args.append('-D_Py_TEST_PEGEN')
    extra_link_args = get_extra_flags("LDFLAGS", "PY_LDFLAGS_NODIST")
    if keep_asserts:
        extra_compile_args.append("-UNDEBUG")
    extension = [
        Extension(
            extension_name,
            sources=[
                str(MOD_DIR.parent.parent.parent / "Python" / "Python-ast.c"),
                str(MOD_DIR.parent.parent.parent / "Python" / "asdl.c"),
                str(MOD_DIR.parent.parent.parent / "Parser" / "tokenizer.c"),
                str(MOD_DIR.parent.parent.parent / "Parser" / "pegen.c"),
                str(MOD_DIR.parent.parent.parent / "Parser" /
                    "string_parser.c"),
                str(MOD_DIR.parent / "peg_extension" / "peg_extension.c"),
                generated_source_path,
            ],
            include_dirs=[
                str(MOD_DIR.parent.parent.parent / "Include" / "internal"),
                str(MOD_DIR.parent.parent.parent / "Parser"),
            ],
            extra_compile_args=extra_compile_args,
            extra_link_args=extra_link_args,
        )
    ]
    dist = Distribution({"name": extension_name, "ext_modules": extension})
    cmd = build_ext(dist)
    fixup_build_ext(cmd)
    cmd.inplace = True
    if build_dir:
        cmd.build_temp = build_dir
        cmd.build_lib = build_dir
    cmd.ensure_finalized()
    cmd.run()

    extension_path = source_file_path.parent / cmd.get_ext_filename(
        extension_name)
    shutil.move(cmd.get_ext_fullpath(extension_name), extension_path)

    cmd = clean(dist)
    cmd.finalize_options()
    cmd.run()

    return extension_path
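A hedged usage sketch for compile_c_extension, following the docstring's own path example (the build directory is hypothetical):

so_path = compile_c_extension(
    "mydir/parser.c",              # generated source, per the docstring example
    build_dir="/tmp/pegen-build",  # hypothetical temporary build directory
    verbose=True,
)
print(so_path)                     # e.g. mydir/parser.cpython-38-darwin.so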
Example #42
    def test_create_pypirc(self):
        # this test makes sure a .pypirc file
        # is created when requested.

        # let's create a fake distribution
        # and a register instance
        dist = Distribution()
        dist.metadata.url = 'xxx'
        dist.metadata.author = 'xxx'
        dist.metadata.author_email = 'xxx'
        dist.metadata.name = 'xxx'
        dist.metadata.version =  'xxx'
        cmd = register(dist)

        # we shouldn't have a .pypirc file yet
        self.assert_(not os.path.exists(self.rc))

        # patching raw_input and getpass.getpass
        # so register gets happy
        #
        # Here's what we are faking :
        # use your existing login (choice 1.)
        # Username : '******'
        # Password : '******'
        # Save your login (y/N)? : 'y'
        inputs = RawInputs('1', 'tarek', 'y')
        from distutils.command import register as register_module
        register_module.raw_input = inputs.__call__
        def _getpass(prompt):
            return 'xxx'
        register_module.getpass.getpass = _getpass
        class FakeServer(object):
            def __init__(self):
                self.calls = []

            def __call__(self, *args):
                # we want to compare them, so let's store
                # something comparable
                els = args[0].items()
                els.sort()
                self.calls.append(tuple(els))
                return 200, 'OK'

        cmd.post_to_server = pypi_server = FakeServer()

        # let's run the command
        cmd.run()

        # we should have a brand new .pypirc file
        self.assert_(os.path.exists(self.rc))

        # with the content similar to WANTED_PYPIRC
        content = open(self.rc).read()
        self.assertEquals(content, WANTED_PYPIRC)

        # now let's make sure the .pypirc file generated
        # really works : we shouldn't be asked anything
        # if we run the command again
        def _no_way(prompt=''):
            raise AssertionError(prompt)
        register_module.raw_input = _no_way

        cmd.run()

        # let's see what the server received : we should
        # have 2 similar requests
        self.assert_(len(pypi_server.calls), 2)
        self.assert_(pypi_server.calls[0], pypi_server.calls[1])
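WANTED_PYPIRC is not shown in this listing; given that the faked username is 'tarek' and the patched getpass returns 'xxx', the file the register command writes should look roughly like this (an approximation, not the exact fixture):

WANTED_PYPIRC = """\
[distutils]
index-servers =
    pypi

[pypi]
username:tarek
password:xxx
"""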
Example #43
def d2to1(dist, attr, value):
    """Implements the actual d2to1 setup() keyword.  When used, this should be
    the only keyword in your setup() aside from `setup_requires`.

    If given as a string, the value of d2to1 is assumed to be the relative path
    to the setup.cfg file to use.  Otherwise, if it evaluates to true, it
    simply assumes that d2to1 should be used, and the default 'setup.cfg' is
    used.

    This works by reading the setup.cfg file, parsing out the supported
    metadata and command options, and using them to rebuild the
    `DistributionMetadata` object and set the newly added command options.

    The reason for doing things this way is that a custom `Distribution` class
    will not play nicely with setup_requires; however, this implementation may
    not work well with distributions that do use a `Distribution` subclass.
    """

    if not value:
        return
    if isinstance(value, six.string_types):
        path = os.path.abspath(value)
    else:
        path = os.path.abspath('setup.cfg')
    if not os.path.exists(path):
        raise DistutilsFileError(
            'The setup.cfg file %s does not exist.' % path)

    # Converts the setup.cfg file to setup() arguments
    try:
        attrs = cfg_to_args(path)
    except:
        e = sys.exc_info()[1]
        raise DistutilsSetupError(
            'Error parsing %s: %s: %s' % (path, e.__class__.__name__,
                                          e.args[0]))

    # Repeat some of the Distribution initialization code with the newly
    # provided attrs
    if attrs:
        # Skips 'options' and 'licence' support which are rarely used; may add
        # back in later if demanded
        for key, val in six.iteritems(attrs):
            if hasattr(dist.metadata, 'set_' + key):
                getattr(dist.metadata, 'set_' + key)(val)
            elif hasattr(dist.metadata, key):
                setattr(dist.metadata, key, val)
            elif hasattr(dist, key):
                setattr(dist, key, val)
            else:
                msg = 'Unknown distribution option: %s' % repr(key)
                warnings.warn(msg)

    # Re-finalize the underlying Distribution
    _Distribution.finalize_options(dist)

    # This bit comes out of distribute/setuptools
    if isinstance(dist.metadata.version, six.integer_types + (float,)):
        # Some people apparently take "version number" too literally :)
        dist.metadata.version = str(dist.metadata.version)

    # This bit of hackery is necessary so that the Distribution will ignore
    # normally unsupported command options (namely pre-hooks and post-hooks).
    # dist.command_options is normally a dict mapping command names to dicts of
    # their options.  Now it will be a defaultdict that returns IgnoreDicts for
    # each command's options, so we can pass through the unsupported options.
    ignore = ['pre_hook.*', 'post_hook.*']
    dist.command_options = DefaultGetDict(lambda: IgnoreDict(ignore))
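As the docstring says, d2to1 is meant to be the only setup() keyword besides setup_requires; a minimal setup.py using it would look roughly like this:

from setuptools import setup

setup(
    setup_requires=['d2to1'],
    d2to1=True,   # or a relative path to an alternative setup.cfg
)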
Example #44
    def test_finalize_options(self):
        # Make sure Python's include directories (for Python.h, pyconfig.h,
        # etc.) are in the include search path.
        modules = [Extension('foo', ['xxx'], optional=False)]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = self.build_ext(dist)
        cmd.finalize_options()

        py_include = sysconfig.get_python_inc()
        for p in py_include.split(os.path.pathsep):
            self.assertIn(p, cmd.include_dirs)

        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
        for p in plat_py_include.split(os.path.pathsep):
            self.assertIn(p, cmd.include_dirs)

        # make sure cmd.libraries is turned into a list
        # if it's a string
        cmd = self.build_ext(dist)
        cmd.libraries = 'my_lib, other_lib lastlib'
        cmd.finalize_options()
        self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib'])

        # make sure cmd.library_dirs is turned into a list
        # if it's a string
        cmd = self.build_ext(dist)
        cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep
        cmd.finalize_options()
        self.assertIn('my_lib_dir', cmd.library_dirs)
        self.assertIn('other_lib_dir', cmd.library_dirs)

        # make sure rpath is turned into a list
        # if it's a string
        cmd = self.build_ext(dist)
        cmd.rpath = 'one%stwo' % os.pathsep
        cmd.finalize_options()
        self.assertEqual(cmd.rpath, ['one', 'two'])

        # make sure cmd.link_objects is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.link_objects = 'one two,three'
        cmd.finalize_options()
        self.assertEqual(cmd.link_objects, ['one', 'two', 'three'])

        # XXX more tests to perform for win32

        # make sure define is turned into 2-tuples
        # strings if they are ','-separated strings
        cmd = self.build_ext(dist)
        cmd.define = 'one,two'
        cmd.finalize_options()
        self.assertEqual(cmd.define, [('one', '1'), ('two', '1')])

        # make sure undef is turned into a list of
        # strings if they are ','-separated strings
        cmd = self.build_ext(dist)
        cmd.undef = 'one,two'
        cmd.finalize_options()
        self.assertEqual(cmd.undef, ['one', 'two'])

        # make sure swig_opts is turned into a list
        cmd = self.build_ext(dist)
        cmd.swig_opts = None
        cmd.finalize_options()
        self.assertEqual(cmd.swig_opts, [])

        cmd = self.build_ext(dist)
        cmd.swig_opts = '1 2'
        cmd.finalize_options()
        self.assertEqual(cmd.swig_opts, ['1', '2'])
Example #45
def readme():
    with open('README.md') as f:
        return f.read()


# from https://github.com/ninjaaron/fast-entry_points/; see https://github.com/sagemathinc/cocalc/issues/2259
import fastentrypoints
from setuptools import setup, find_packages

# This checks whether setup.py is run with 'install --user'.
# In that case we assume it is installed for development and do NOT change the python executable,
# because we want to load the local library via the site.py mechanism.
# (This mimics http://svn.python.org/projects/python/trunk/Lib/distutils/dist.py, which setup() calls behind the scenes.)
from distutils.core import Distribution
d = Distribution()
d.parse_command_line()

# CRITICAL!
# Use a wrapped python executable so the user's "site" packages in ~/.local are not loaded.
# Otherwise setuptools' startup scripts do not work if there is a conflicting
# setuptools version in .local/lib/python-packages (or any other locally installed python lib).
# Setting sys.executable changes the shebang #!... at the top of these scripts.
# Credits to http://stackoverflow.com/a/17329493
python2_nosite = '/usr/local/bin/python2-nosite'
# don't overwrite for local smc-in-smc development
if 'user' not in d.command_options.get("install", {}).keys():
    # check, if python2_nosite exists and is executable
    if os.path.isfile(python2_nosite) and os.access(python2_nosite, os.X_OK):
        import sys
        sys.executable = python2_nosite
Example #46
def cython_inline(code,
                  get_type=unsafe_type,
                  lib_dir=os.path.expanduser('~/.cython/inline'),
                  cython_include_dirs=['.'],
                  force=False,
                  quiet=False,
                  locals=None,
                  globals=None,
                  **kwds):
    if get_type is None:
        get_type = lambda x: 'object'
    code = to_unicode(code)
    orig_code = code
    code, literals = strip_string_literals(code)
    code = strip_common_indent(code)
    ctx = Context(cython_include_dirs, default_options)
    if locals is None:
        locals = inspect.currentframe().f_back.f_back.f_locals
    if globals is None:
        globals = inspect.currentframe().f_back.f_back.f_globals
    try:
        for symbol in unbound_symbols(code):
            if symbol in kwds:
                continue
            elif symbol in locals:
                kwds[symbol] = locals[symbol]
            elif symbol in globals:
                kwds[symbol] = globals[symbol]
            else:
                print("Couldn't find %r" % symbol)
    except AssertionError:
        if not quiet:
            # Parsing from strings not fully supported (e.g. cimports).
            print(
                "Could not parse code as a string (to extract unbound symbols)."
            )
    arg_names = list(kwds.keys())
    arg_names.sort()
    arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
    key = orig_code, arg_sigs, sys.version_info, sys.executable, Cython.__version__
    module_name = "_cython_inline_" + hashlib.md5(
        str(key).encode('utf-8')).hexdigest()
    try:
        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)
        if lib_dir not in sys.path:
            sys.path.append(lib_dir)
        if force:
            raise ImportError
        else:
            __import__(module_name)
    except ImportError:
        cflags = []
        c_include_dirs = []
        cimports = []
        qualified = re.compile(r'([.\w]+)[.]')
        for type, _ in arg_sigs:
            m = qualified.match(type)
            if m:
                cimports.append('\ncimport %s' % m.groups()[0])
                # one special case
                if m.groups()[0] == 'numpy':
                    import numpy
                    c_include_dirs.append(numpy.get_include())
                    # cflags.append('-Wno-unused')
        module_body, func_body = extract_func_code(code)
        params = ', '.join(['%s %s' % a for a in arg_sigs])
        module_code = """
%(module_body)s
%(cimports)s
def __invoke(%(params)s):
%(func_body)s
        """ % {
            'cimports': '\n'.join(cimports),
            'module_body': module_body,
            'params': params,
            'func_body': func_body
        }
        for key, value in list(literals.items()):
            module_code = module_code.replace(key, value)
        pyx_file = os.path.join(lib_dir, module_name + '.pyx')
        fh = open(pyx_file, 'w')
        try:
            fh.write(module_code)
        finally:
            fh.close()
        extension = Extension(name=module_name,
                              sources=[pyx_file],
                              include_dirs=c_include_dirs,
                              extra_compile_args=cflags)
        build_extension = build_ext(Distribution())
        build_extension.finalize_options()
        build_extension.extensions = cythonize([extension],
                                               ctx=ctx,
                                               quiet=quiet)
        build_extension.build_temp = os.path.dirname(pyx_file)
        build_extension.build_lib = lib_dir
        build_extension.run()
        _code_cache[key] = module_name
    arg_list = [kwds[arg] for arg in arg_names]
    return __import__(module_name).__invoke(*arg_list)
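A usage sketch for the inline helper above (it mirrors Cython.Build.Inline.cython_inline; the snippet and values are illustrative):

result = cython_inline("return a + b", a=3, b=4)  # compiled, cached, then invoked
assert result == 7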
Example #47
    def test_finalize_options(self):
        # Make sure Python's include directories (for Python.h, pyconfig.h,
        # etc.) are in the include search path.
        modules = [Extension('foo', ['xxx'])]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = build_ext(dist)
        cmd.finalize_options()

        from distutils import sysconfig
        py_include = sysconfig.get_python_inc()
        self.assert_(py_include in cmd.include_dirs)

        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
        self.assert_(plat_py_include in cmd.include_dirs)

        # make sure cmd.libraries is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.libraries = 'my_lib'
        cmd.finalize_options()
        self.assertEquals(cmd.libraries, ['my_lib'])

        # make sure cmd.library_dirs is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.library_dirs = 'my_lib_dir'
        cmd.finalize_options()
        self.assert_('my_lib_dir' in cmd.library_dirs)

        # make sure rpath is turned into a list
        # if it's a list of os.pathsep's paths
        cmd = build_ext(dist)
        cmd.rpath = os.pathsep.join(['one', 'two'])
        cmd.finalize_options()
        self.assertEquals(cmd.rpath, ['one', 'two'])

        # XXX more tests to perform for win32

        # make sure define is turned into 2-tuples
        # strings if they are ','-separated strings
        cmd = build_ext(dist)
        cmd.define = 'one,two'
        cmd.finalize_options()
        self.assertEquals(cmd.define, [('one', '1'), ('two', '1')])

        # make sure undef is turned into a list of
        # strings if they are ','-separated strings
        cmd = build_ext(dist)
        cmd.undef = 'one,two'
        cmd.finalize_options()
        self.assertEquals(cmd.undef, ['one', 'two'])

        # make sure swig_opts is turned into a list
        cmd = build_ext(dist)
        cmd.swig_opts = None
        cmd.finalize_options()
        self.assertEquals(cmd.swig_opts, [])

        cmd = build_ext(dist)
        cmd.swig_opts = '1 2'
        cmd.finalize_options()
        self.assertEquals(cmd.swig_opts, ['1', '2'])
Example #48
 def parse_command_line(self):
     """Process features after parsing command line options"""
     result = _Distribution.parse_command_line(self)
     if self.features:
         self._finalize_features()
     return result
Example #49
def __compile(
        scoring_function='OneMinus<MeanAffinity<RegionGraphType, ScoreValue>>',
        discretize_queue=0,
        force_rebuild=False):
    import sys
    import os
    import shutil
    import glob
    import numpy
    import fcntl

    try:
        import hashlib
    except ImportError:
        import md5 as hashlib

    from distutils.core import Distribution, Extension
    from distutils.command.build_ext import build_ext
    from distutils.sysconfig import get_config_vars, get_python_inc

    import Cython
    from Cython.Compiler.Main import Context, default_options
    from Cython.Build.Dependencies import cythonize

    # compile frontend.pyx for given scoring function

    source_dir = os.path.dirname(os.path.abspath(__file__))
    source_files = [
        os.path.join(source_dir, 'agglomerate.pyx'),
        os.path.join(source_dir, 'frontend_agglomerate.h'),
        os.path.join(source_dir, 'frontend_agglomerate.cpp')
    ]
    source_files += glob.glob(source_dir + '/backend/*.hpp')
    source_files.sort()
    source_files_hashes = [
        hashlib.md5(open(f, 'r').read().encode('utf-8')).hexdigest()
        for f in source_files
    ]

    key = scoring_function, discretize_queue, source_files_hashes, sys.version_info, sys.executable, Cython.__version__
    module_name = 'waterz_' + hashlib.md5(str(key).encode('utf-8')).hexdigest()
    lib_dir = os.path.expanduser('~/.cython/inline')

    # this may be called concurrently, so the directory can appear between any
    # existence check and makedirs(); just ignore the error if it already exists
    try:
        os.makedirs(lib_dir)
    except OSError:
        pass

    # make sure the same module is not built concurrently
    with open(os.path.join(lib_dir, module_name + '.lock'), 'w') as lock_file:
        fcntl.lockf(lock_file, fcntl.LOCK_EX)

        try:

            if lib_dir not in sys.path:
                sys.path.append(lib_dir)
            if force_rebuild:
                raise ImportError
            else:
                __import__(module_name)

            print("Re-using already compiled waterz version")
            return module_name

        except ImportError:

            print("Compiling waterz in " + str(lib_dir))

            cython_include_dirs = ['.']
            ctx = Context(cython_include_dirs, default_options)

            include_dir = os.path.join(lib_dir, module_name)
            if not os.path.exists(include_dir):
                os.makedirs(include_dir)

            include_dirs = [
                source_dir,
                include_dir,
                os.path.join(source_dir, 'backend'),
                os.path.dirname(get_python_inc()),
                numpy.get_include(),
            ]

            scoring_function_header = os.path.join(include_dir,
                                                   'ScoringFunction.h')
            with open(scoring_function_header, 'w') as f:
                f.write('typedef %s ScoringFunctionType;' % scoring_function)

            queue_header = os.path.join(include_dir, 'Queue.h')
            with open(queue_header, 'w') as f:
                if discretize_queue == 0:
                    f.write(
                        'template<typename T, typename S> using QueueType = PriorityQueue<T, S>;'
                    )
                else:
                    f.write(
                        'template<typename T, typename S> using QueueType = BinQueue<T, S, %d>;'
                        % discretize_queue)

            # cython requires that the pyx file has the same name as the module
            shutil.copy(os.path.join(source_dir, 'agglomerate.pyx'),
                        os.path.join(lib_dir, module_name + '.pyx'))
            shutil.copy(
                os.path.join(source_dir, 'frontend_agglomerate.cpp'),
                os.path.join(lib_dir,
                             module_name + '_frontend_agglomerate.cpp'))

            # Remove the "-Wstrict-prototypes" compiler option, which isn't valid
            # for C++.
            cfg_vars = get_config_vars()
            if "CFLAGS" in cfg_vars:
                cfg_vars["CFLAGS"] = cfg_vars["CFLAGS"].replace(
                    "-Wstrict-prototypes", "")

            extension = Extension(
                module_name,
                sources=[
                    os.path.join(lib_dir, module_name + '.pyx'),
                    os.path.join(lib_dir,
                                 module_name + '_frontend_agglomerate.cpp')
                ],
                include_dirs=include_dirs,
                language='c++',
                extra_link_args=['-std=c++11'],
                extra_compile_args=['-std=c++11', '-w'])
            build_extension = build_ext(Distribution())
            build_extension.finalize_options()
            build_extension.extensions = cythonize([extension],
                                                   quiet=True,
                                                   nthreads=2)
            build_extension.build_temp = lib_dir
            build_extension.build_lib = lib_dir
            build_extension.run()
            return module_name
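A hypothetical usage sketch for the function above; __compile is internal to waterz, and the idea that the caller imports the returned waterz_<hash> module is inferred from the surrounding examples rather than a documented API:

# build (or re-use) the backend for a given scoring function, then import it;
# the template string below is only an illustrative choice
module_name = __compile(
    scoring_function='OneMinus<MeanAffinity<RegionGraphType, ScoreValue>>',
    discretize_queue=256)
backend = __import__(module_name)   # the compiled Cython extension module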
Example #50
0
def agglomerate(
        affs,
        thresholds,
        gt=None,
        fragments=None,
        aff_threshold_low=0.0001,
        aff_threshold_high=0.9999,
        return_merge_history=False,
        scoring_function='Multiply<OneMinus<MaxAffinity<AffinitiesType>>, MinSize<SizesType>>',
        discretize_queue=0,
        force_rebuild=False):
    '''
    Compute segmentations from an affinity graph for several thresholds.

    Passed volumes need to be converted into contiguous memory arrays. This will
    be done for you if needed, but you can save memory by making sure your
    volumes are already C_CONTIGUOUS.

    Parameters
    ----------

        affs: numpy array, float32, 4 dimensional

            The affinities as an array with affs[channel][z][y][x].

        thresholds: list of float32

            The thresholds to compute segmentations for. For each threshold, one
            segmentation is returned.

        gt: numpy array, uint32, 3 dimensional (optional)

            An optional ground-truth segmentation as an array with gt[z][y][x].
            If given, metrics comparing each returned segmentation against this
            ground truth are computed and included in the results.

        fragments: numpy array, uint64, 3 dimensional (optional)

            An optional volume of fragments to use, instead of the built-in
            zwatershed.

        aff_threshold_low: float, default 0.0001
        aff_threshold_high: float, default 0.9999

            Thresholds on the affinities for the initial segmentation step.

        return_merge_history: bool

            If set to True, the returned tuple will contain a merge history,
            relative to the previous segmentation.

        scoring_function: string, default 'Multiply<OneMinus<MaxAffinity<AffinitiesType>>, MinSize<SizesType>>'

            A C++ type string specifying the edge scoring function to use. See

                https://github.com/funkey/waterz/blob/master/waterz/backend/MergeFunctions.hpp

            for available functions, and

                https://github.com/funkey/waterz/blob/master/waterz/backend/Operators.hpp

            for operators to combine them.

        discretize_queue: int

            If set to non-zero, a bin queue with that many bins will be used to 
            approximate the priority queue for merge operations.

        force_rebuild:

            Force the rebuild of the module. Only needed for development.

    Returns
    -------

        Results are returned as tuples from a generator object, and only
        computed on-the-fly when iterated over. This way, you can ask for
        hundreds of thresholds while at any point only one segmentation is
        stored in memory.

        Depending on the given parameters, the returned values are a subset of
        the following items (in that order):

        segmentation

            The current segmentation (numpy array, uint64, 3 dimensional).

        metrics (only if ground truth was provided)

            A dictionary with the keys 'V_Rand_split', 'V_Rand_merge',
            'V_Info_split', and 'V_Info_merge'.

        merge_history (only if return_merge_history is True)

            A list of dictionaries with keys 'a', 'b', 'c', and 'score',
            indicating that region a got merged with b into c with the given
            score.

    Examples
    --------

        affs = ...
        gt   = ...

        # only segmentation
        for segmentation in agglomerate(affs, range(100,10000,100)):
            # ...

        # segmentation with merge history
        for segmentation, merge_history in agglomerate(affs, range(100,10000,100), return_merge_history = True):
            # ...

        # segmentation with merge history and metrics compared to gt
        for segmentation, metrics, merge_history in agglomerate(affs, range(100,10000,100), gt, return_merge_history = True):
            # ...
    '''

    import sys, os
    import shutil
    import glob
    import numpy
    import fcntl

    try:
        import hashlib
    except ImportError:
        import md5 as hashlib

    from distutils.core import Distribution, Extension
    from distutils.command.build_ext import build_ext
    from distutils.sysconfig import get_config_vars, get_python_inc

    import Cython
    from Cython.Compiler.Main import Context, default_options
    from Cython.Build.Dependencies import cythonize

    # compile frontend.pyx for given scoring function

    source_dir = os.path.dirname(os.path.abspath(__file__))
    source_files = [
        os.path.join(source_dir, 'frontend.pyx'),
        os.path.join(source_dir, 'c_frontend.h'),
        os.path.join(source_dir, 'c_frontend.cpp'),
        os.path.join(source_dir, 'evaluate.hpp')
    ]
    source_files += glob.glob(source_dir + '/backend/*.hpp')
    source_files.sort()
    source_files_hashes = [
        hashlib.md5(open(f, 'r').read().encode('utf-8')).hexdigest()
        for f in source_files
    ]

    key = scoring_function, discretize_queue, source_files_hashes, sys.version_info, sys.executable, Cython.__version__
    module_name = 'waterz_' + hashlib.md5(str(key).encode('utf-8')).hexdigest()
    lib_dir = os.path.expanduser('~/.cython/inline')

    # this may be called concurrently, so the directory can appear between any
    # existence check and makedirs(); just ignore the error if it already exists
    try:
        os.makedirs(lib_dir)
    except OSError:
        pass

    # make sure the same module is not built concurrently
    with open(os.path.join(lib_dir, module_name + '.lock'), 'w') as lock_file:
        fcntl.lockf(lock_file, fcntl.LOCK_EX)

        try:

            if lib_dir not in sys.path:
                sys.path.append(lib_dir)
            if force_rebuild:
                raise ImportError
            else:
                __import__(module_name)

            print("Re-using already compiled waterz version")

        except ImportError:

            print("Compiling waterz in " + str(lib_dir))

            cython_include_dirs = ['.']
            ctx = Context(cython_include_dirs, default_options)

            include_dir = os.path.join(lib_dir, module_name)
            if not os.path.exists(include_dir):
                os.makedirs(include_dir)

            include_dirs = [
                source_dir,
                include_dir,
                os.path.join(source_dir, 'backend'),
                os.path.dirname(get_python_inc()),
                numpy.get_include(),
            ]

            scoring_function_header = os.path.join(include_dir,
                                                   'ScoringFunction.h')
            with open(scoring_function_header, 'w') as f:
                f.write('typedef %s ScoringFunctionType;' % scoring_function)

            queue_header = os.path.join(include_dir, 'Queue.h')
            with open(queue_header, 'w') as f:
                if discretize_queue == 0:
                    f.write(
                        'template<typename T, typename S> using QueueType = PriorityQueue<T, S>;'
                    )
                else:
                    f.write(
                        'template<typename T, typename S> using QueueType = BinQueue<T, S, %d>;'
                        % discretize_queue)

            # cython requires that the pyx file has the same name as the module
            shutil.copy(os.path.join(source_dir, 'frontend.pyx'),
                        os.path.join(lib_dir, module_name + '.pyx'))
            shutil.copy(os.path.join(source_dir, 'c_frontend.cpp'),
                        os.path.join(lib_dir, module_name + '_c_frontend.cpp'))

            # Remove the "-Wstrict-prototypes" compiler option, which isn't valid
            # for C++.
            cfg_vars = get_config_vars()
            if "CFLAGS" in cfg_vars:
                cfg_vars["CFLAGS"] = cfg_vars["CFLAGS"].replace(
                    "-Wstrict-prototypes", "")

            extension = Extension(
                module_name,
                sources=[
                    os.path.join(lib_dir, module_name + '.pyx'),
                    os.path.join(lib_dir, module_name + '_c_frontend.cpp')
                ],
                include_dirs=include_dirs,
                language='c++',
                extra_link_args=['-std=c++11'],
                extra_compile_args=['-std=c++11', '-w'])
            build_extension = build_ext(Distribution())
            build_extension.finalize_options()
            build_extension.extensions = cythonize([extension],
                                                   quiet=True,
                                                   nthreads=2)
            build_extension.build_temp = lib_dir
            build_extension.build_lib = lib_dir
            build_extension.run()

    return __import__(module_name).agglomerate(affs, thresholds, gt, fragments,
                                               aff_threshold_low,
                                               aff_threshold_high,
                                               return_merge_history)
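Building on the docstring's examples, here is a minimal self-contained sketch with random data; the shapes and dtypes follow the parameter descriptions above, while the threshold values are arbitrary:

import numpy as np

affs = np.random.rand(3, 10, 64, 64).astype(np.float32)        # (channel, z, y, x)
gt = np.random.randint(0, 5, size=(10, 64, 64)).astype(np.uint32)

# segmentations are produced lazily, one threshold at a time
for segmentation, metrics in agglomerate(affs, [0.3, 0.5, 0.7], gt=gt):
    print(segmentation.shape, metrics['V_Rand_split'])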
Example #51
0
def main():
    argparser = get_parser()
    args = argparser.parse_args(sys.argv[1:])

    if args.profile_clean:
        profile_clean()
        return

    distutils.log.set_verbosity(1)

    system = BuildSystem(args)

    if args.path:
        ext_modules = [system.get_extension_by_path(args.path)]
    else:
        ext_modules = system.discover_extensions()

    if args.kit:
        ext_modules = [e for e in ext_modules if e.name in kits[args.kit]]

    def add_args(arg_name, values, append=True):
        for ext_module in ext_modules:
            arg_value = getattr(ext_module, arg_name) or []
            if append:
                arg_value.extend(values)
            else:
                newvalues = list(values)
                newvalues.extend(arg_value)
                arg_value = newvalues
            setattr(ext_module, arg_name, arg_value)

    def append_compile_args(*values):
        add_args('extra_compile_args', values)

    def append_link_args(*values):
        add_args('extra_link_args', values)

    def prepend_libraries(*values):
        add_args('libraries', values, append=False)

    if args.native:
        append_compile_args('-march=native')
    if args.optimization:
        append_compile_args('-O' + args.optimization)
    if args.debug:
        append_compile_args('-g3', '-O0', '-Wp,-U_FORTIFY_SOURCE')
    if args.sanitize:
        append_compile_args('-g3', '-fsanitize=address',
                            '-fsanitize=undefined', '-fno-common',
                            '-fno-omit-frame-pointer')
        prepend_libraries('asan', 'ubsan')
    if args.profile_generate:
        append_compile_args('--profile-generate')
        append_link_args('-lgcov')
    if args.profile_use:
        for ext_module in ext_modules:
            if ext_module.name in ('parser.cparser', 'pipeline.cpipeline'):
                continue
            ext_module.extra_compile_args.append('--profile-use')
    if args.flto:
        append_compile_args('-flto')
        append_link_args('-flto')
    if args.coverage:
        append_compile_args('--coverage')
        append_link_args('-lgcov')
    if args.extra_compile:
        append_compile_args(args.extra_compile)

    ext_modules = [e for e in ext_modules if system.should_rebuild(e)]
    if not ext_modules:
        return

    dist = Distribution(dict(ext_modules=ext_modules))

    prune(args.dest)

    cmd = custom_build_ext(dist)
    cmd.build_lib = os.path.join(args.dest, '.build/lib')
    cmd.build_temp = os.path.join(args.dest, '.build/temp')
    cmd.finalize_options()

    try:
        cmd.run()
    except CompileError:
        sys.exit(1)

    symlink_python_files(args.dest)

    for ext_module in ext_modules:
        os.makedirs(system.dest_folder(ext_module.name), exist_ok=True)
        shutil.copy(cmd.get_ext_fullpath(ext_module.name),
                    system.dest_folder(ext_module.name))

    for ext_module in ext_modules:
        with open(system.build_toml(ext_module.name), 'w') as f:
            build_info = {
                'extra_compile_args': ext_module.extra_compile_args,
                'extra_link_args': ext_module.extra_link_args,
                'define_macros': dict(ext_module.define_macros),
                'sources': ext_module.sources
            }
            pytoml.dump(build_info, f)
Example #52
0
def get_config():
    from distutils.core import Distribution
    from distutils.sysconfig import get_config_vars
    get_config_vars()  # workaround for a distutils bug, e.g. on OS X
    config = Distribution().get_command_obj('config')
    return config
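The returned object is the distutils 'config' command, so its compile-probe helpers can be used directly; a minimal sketch, assuming a POSIX toolchain with a working C compiler on PATH:

config = get_config()
config.ensure_finalized()
# check_header()/check_lib() compile and link small throw-away test programs
has_stdint = config.check_header('stdint.h')
has_libm = config.check_lib('m')
print(has_stdint, has_libm)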
Example #53
0
def malis(affs, gt, force_rebuild=False):

    import sys, os
    import shutil
    import glob
    import numpy
    import fcntl

    try:
        import hashlib
    except ImportError:
        import md5 as hashlib

    from distutils.core import Distribution, Extension
    from distutils.command.build_ext import build_ext
    from distutils.sysconfig import get_config_vars, get_python_inc

    import Cython
    from Cython.Compiler.Main import Context, default_options
    from Cython.Build.Dependencies import cythonize

    source_dir = os.path.dirname(os.path.abspath(__file__))
    source_files = [
        os.path.join(source_dir, 'frontend.pyx'),
        os.path.join(source_dir, 'malis_loss_layer.hpp'),
        os.path.join(source_dir, 'malis_loss_layer.cpp'),
        os.path.join(source_dir, 'c_frontend.h'),
    ]
    source_files.sort()
    source_files_hashes = [
        hashlib.md5(open(f, 'r').read().encode('utf-8')).hexdigest()
        for f in source_files
    ]

    key = source_files_hashes, sys.version_info, sys.executable, Cython.__version__
    module_name = 'pymalis_' + hashlib.md5(
        str(key).encode('utf-8')).hexdigest()
    lib_dir = os.path.expanduser('~/.cython/inline')

    # this may be called concurrently, so the directory can appear between any
    # existence check and makedirs(); just ignore the error if it already exists
    try:
        os.makedirs(lib_dir)
    except OSError:
        pass

    # make sure the same module is not built concurrently
    with open(os.path.join(lib_dir, module_name + '.lock'), 'w') as lock_file:
        fcntl.lockf(lock_file, fcntl.LOCK_EX)

        try:

            if lib_dir not in sys.path:
                sys.path.append(lib_dir)
            if force_rebuild:
                raise ImportError
            else:
                __import__(module_name)

            print("Re-using already compiled pymalis version")

        except ImportError:

            print("Compiling pymalis in " + str(lib_dir))

            cython_include_dirs = ['.']
            ctx = Context(cython_include_dirs, default_options)

            scoring_function_include_dir = os.path.join(lib_dir, module_name)
            if not os.path.exists(scoring_function_include_dir):
                os.makedirs(scoring_function_include_dir)

            include_dirs = [
                source_dir,
                os.path.dirname(get_python_inc()),
                numpy.get_include(),
            ]

            # cython requires that the pyx file has the same name as the module
            shutil.copy(os.path.join(source_dir, 'frontend.pyx'),
                        os.path.join(lib_dir, module_name + '.pyx'))
            shutil.copy(
                os.path.join(source_dir, 'malis_loss_layer.cpp'),
                os.path.join(lib_dir, module_name + '_malis_loss_layer.cpp'))

            # Remove the "-Wstrict-prototypes" compiler option, which isn't valid
            # for C++.
            cfg_vars = get_config_vars()
            if "CFLAGS" in cfg_vars:
                cfg_vars["CFLAGS"] = cfg_vars["CFLAGS"].replace(
                    "-Wstrict-prototypes", "")

            extension = Extension(
                module_name,
                sources=[
                    os.path.join(lib_dir, module_name + '.pyx'),
                    os.path.join(lib_dir,
                                 module_name + '_malis_loss_layer.cpp')
                ],
                include_dirs=include_dirs,
                language='c++',
                extra_link_args=['-std=c++11'],
                extra_compile_args=['-std=c++11', '-w'])
            build_extension = build_ext(Distribution())
            build_extension.finalize_options()
            build_extension.extensions = cythonize([extension],
                                                   quiet=True,
                                                   nthreads=2)
            build_extension.build_temp = lib_dir
            build_extension.build_lib = lib_dir
            build_extension.run()

    return __import__(module_name).malis(affs, gt)
Example #54
0
    def get_build_scripts_cmd(self, target, scripts):
        import sys
        dist = Distribution()
        dist.scripts = scripts
        dist.command_obj['build'] = support.DummyCommand(
            build_scripts=target, force=1, executable=sys.executable)
        return build_scripts(dist)
Example #55
0
def compile_c_extension(
    generated_source_path: str,
    build_dir: Optional[str] = None,
    verbose: bool = False,
    keep_asserts: bool = True,
) -> str:
    """Compile the generated source for a parser generator into an extension module.

    The extension module will be generated in the same directory as the provided path
    for the generated source, with the same basename (in addition to extension module
    metadata). For example, for the source mydir/parser.c the generated extension
    in a darwin system with python 3.8 will be mydir/parser.cpython-38-darwin.so.

    If *build_dir* is provided, that path will be used as the temporary build directory
    of distutils (this is useful in case you want to use a temporary directory).
    """
    import distutils.log
    from distutils.core import Distribution, Extension
    from distutils.command.clean import clean  # type: ignore
    from distutils.command.build_ext import build_ext  # type: ignore
    from distutils.tests.support import fixup_build_ext  # type: ignore

    if verbose:
        distutils.log.set_verbosity(distutils.log.DEBUG)

    source_file_path = pathlib.Path(generated_source_path)
    extension_name = source_file_path.stem
    extra_compile_args = get_extra_flags("CFLAGS", "PY_CFLAGS_NODIST")
    extra_link_args = get_extra_flags("LDFLAGS", "PY_LDFLAGS_NODIST")
    if keep_asserts:
        extra_compile_args.append("-UNDEBUG")
    cpython_root_str = os.getenv("CPYTHON_ROOT")
    if cpython_root_str:
        cpython_root: pathlib.Path = pathlib.Path(cpython_root_str)
    else:
        cpython_root = MOD_DIR.parent.parent / "cpython"
        if not (cpython_root / "Python").is_dir():
            # This is Guido's convention. :-)
            cpython_root = pathlib.Path.home() / "cpython"
        if not (cpython_root / "Python").is_dir():
            raise ValueError(
                "No CPython repository found. Please use the CPYTHON_ROOT env variable."
            )
    extension = [
        Extension(
            extension_name,
            sources=[
                str(cpython_root / "Python" / "Python-ast.c"),
                str(cpython_root / "Python" / "asdl.c"),
                str(cpython_root / "Parser" / "tokenizer.c"),
                str(cpython_root / "Parser" / "pegen.c"),
                str(cpython_root / "Parser" / "string_parser.c"),
                str(MOD_DIR.parent / "peg_extension" / "peg_extension.c"),
                generated_source_path,
            ],
            include_dirs=[
                str(cpython_root / "Include" / "internal"),
                str(cpython_root / "Parser"),
            ],
            extra_compile_args=extra_compile_args,
            extra_link_args=extra_link_args,
        )
    ]
    dist = Distribution({"name": extension_name, "ext_modules": extension})
    cmd = build_ext(dist)
    fixup_build_ext(cmd)
    cmd.inplace = True
    if build_dir:
        cmd.build_temp = build_dir
        cmd.build_lib = build_dir
    cmd.ensure_finalized()
    cmd.run()

    extension_path = source_file_path.parent / cmd.get_ext_filename(
        extension_name)
    shutil.move(cmd.get_ext_fullpath(extension_name), extension_path)

    cmd = clean(dist)
    cmd.finalize_options()
    cmd.run()

    return extension_path
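A hypothetical usage sketch for compile_c_extension; the mydir/parser.c path, the CPython checkout it relies on, and the build directory are placeholders rather than part of the example above:

import importlib.util

so_path = compile_c_extension("mydir/parser.c", build_dir="/tmp/pegen_build", verbose=True)

# load the freshly built extension module directly from its file path
spec = importlib.util.spec_from_file_location("parser", so_path)
parser = importlib.util.module_from_spec(spec)
spec.loader.exec_module(parser)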
Example #56
0
def get_command_class(name):
    # Returns the right class for either distutils or setuptools
    return Distribution({}).get_command_class(name)
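A minimal usage sketch; whether the returned class comes from plain distutils or from a setuptools override depends only on which Distribution is importable at call time:

build_ext_cls = get_command_class('build_ext')
print(build_ext_cls.__module__, build_ext_cls.__name__)
# the class can then be subclassed or passed to setup() via cmdclass={'build_ext': build_ext_cls}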
Example #57
0
    def print_commands(self):
        for ep in pkg_resources.iter_entry_points('distutils.commands'):
            if ep.name not in self.cmdclass:
                cmdclass = ep.load(False)  # don't require extras, we're not running
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.print_commands(self)
Example #58
0
    def test_user_site(self):
        # test install with --user
        # preparing the environment for the test
        self.old_user_base = site.USER_BASE
        self.old_user_site = site.USER_SITE
        self.tmpdir = self.mkdtemp()
        self.user_base = os.path.join(self.tmpdir, 'B')
        self.user_site = os.path.join(self.tmpdir, 'S')
        site.USER_BASE = self.user_base
        site.USER_SITE = self.user_site
        install_module.USER_BASE = self.user_base
        install_module.USER_SITE = self.user_site

        def _expanduser(path):
            if path.startswith('~'):
                return os.path.normpath(self.tmpdir + path[1:])
            return path

        self.old_expand = os.path.expanduser
        os.path.expanduser = _expanduser

        def cleanup():
            site.USER_BASE = self.old_user_base
            site.USER_SITE = self.old_user_site
            install_module.USER_BASE = self.old_user_base
            install_module.USER_SITE = self.old_user_site
            os.path.expanduser = self.old_expand

        self.addCleanup(cleanup)

        for key in ('nt_user', 'posix_user'):
            self.assertIn(key, INSTALL_SCHEMES)

        dist = Distribution({'name': 'xx'})
        cmd = install(dist)

        # making sure the user option is there
        options = [name for name, short, label in cmd.user_options]
        self.assertIn('user', options)

        # setting a value
        cmd.user = 1

        # user base and site shouldn't be created yet
        self.assertFalse(os.path.exists(self.user_base))
        self.assertFalse(os.path.exists(self.user_site))

        # let's run finalize
        cmd.ensure_finalized()

        # now they should
        self.assertTrue(os.path.exists(self.user_base))
        self.assertTrue(os.path.exists(self.user_site))

        self.assertIn('userbase', cmd.config_vars)
        self.assertIn('usersite', cmd.config_vars)

        actual_headers = os.path.relpath(cmd.install_headers, self.user_base)
        if os.name == 'nt':
            site_path = os.path.relpath(os.path.dirname(self.old_user_site),
                                        self.old_user_base)
            include = os.path.join(site_path, 'Include')
        else:
            include = sysconfig.get_python_inc(0, '')
        expect_headers = os.path.join(include, 'xx')

        self.assertEqual(os.path.normcase(actual_headers),
                         os.path.normcase(expect_headers))
Example #59
0
def get_command_class(name):
    # in case pip loads with setuptools this returns the extended commands
    return Distribution({}).get_command_class(name)
Example #60
0
def compile_c_extension(
    generated_source_path: str,
    build_dir: Optional[str] = None,
    verbose: bool = False,
    keep_asserts: bool = True,
) -> str:
    """Compile the generated source for a parser generator into an extension module.

    The extension module will be generated in the same directory as the provided path
    for the generated source, with the same basename (in addition to extension module
    metadata). For example, for the source mydir/parser.c the generated extension
    in a darwin system with python 3.8 will be mydir/parser.cpython-38-darwin.so.

    If *build_dir* is provided, that path will be used as the temporary build directory
    of distutils (this is useful in case you want to use a temporary directory).
    """
    if verbose:
        distutils.log.set_verbosity(distutils.log.DEBUG)

    source_file_path = pathlib.Path(generated_source_path)
    extension_name = source_file_path.stem
    extra_compile_args = []
    if not sys.platform.startswith('win'):
        extra_compile_args.append("-std=c99")
    if keep_asserts:
        extra_compile_args.append("-UNDEBUG")
    extension = [
        Extension(
            extension_name,
            sources=[
                str(MOD_DIR.parent.parent.parent / "Python" / "Python-ast.c"),
                str(MOD_DIR.parent.parent.parent / "Python" / "asdl.c"),
                str(MOD_DIR.parent.parent.parent / "Parser" / "tokenizer.c"),
                str(MOD_DIR.parent.parent.parent / "Parser" / "pegen" / "pegen.c"),
                str(MOD_DIR.parent.parent.parent / "Parser" / "pegen" / "parse_string.c"),
                str(MOD_DIR.parent / "peg_extension" / "peg_extension.c"),
                generated_source_path,
            ],
            include_dirs=[
                str(MOD_DIR.parent.parent.parent / "Include" / "internal"),
                str(MOD_DIR.parent.parent.parent / "Parser"),
                str(MOD_DIR.parent.parent.parent / "Parser" / "pegen"),
            ],
            extra_compile_args=extra_compile_args,
        )
    ]
    dist = Distribution({"name": extension_name, "ext_modules": extension})
    cmd = build_ext(dist)
    fixup_build_ext(cmd)
    cmd.inplace = True
    if build_dir:
        cmd.build_temp = build_dir
    cmd.ensure_finalized()
    cmd.run()

    extension_path = source_file_path.parent / cmd.get_ext_filename(extension_name)
    shutil.move(cmd.get_ext_fullpath(extension_name), extension_path)

    cmd = clean(dist)
    cmd.finalize_options()
    cmd.run()

    return extension_path