Example #1
File: setup.py Project: queer1/picard
 def finalize_options(self):
     if self.files:
         files = []
         for f in self.files.split(","):
             head, tail = os.path.split(f)
             m = re.match(r'(?:ui_)?([^.]+)', tail)
             if m:
                 name = m.group(1)
             else:
                 log.warn('ignoring %r (cannot extract base name)' % f)
                 continue
             uiname = name + '.ui'
             uifile = os.path.join(head, uiname)
             if os.path.isfile(uifile):
                 pyfile = os.path.join(os.path.dirname(uifile),
                                       py_from_ui(uifile))
                 files.append((uifile, pyfile))
             else:
                 uifile = os.path.join('ui', uiname)
                 if os.path.isfile(uifile):
                     files.append((uifile,
                                   py_from_ui_with_defaultdir(uifile)))
                 else:
                     log.warn('ignoring %r' % f)
         self.files = files
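For reference, a quick standalone check of how the base-name regex above behaves (a sketch, not part of the original setup.py):

import re

for tail in ("ui_options.py", "options.ui", "README"):
    m = re.match(r'(?:ui_)?([^.]+)', tail)
    print(tail, "->", m.group(1) if m else None)
# ui_options.py -> options
# options.ui -> options
# README -> README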
Example #2
def _install(tarball, install_args=()):
    """install tarball."""
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # installing
        log.warn('Installing Setuptools')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2

    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
Example #3
File: util.py Project: johnnoone/cardhu
def dist2_to_args(config, dist=None):
    '''
    Converts distutils2 options to distutils1 options.
    '''
    dest = {}
    for (section, option), func in D2TO1:
        if func is None:
            log.warn('key %r not yet implemented', (section, option))
            continue

        if not func:
            continue

        try:
            value = config[section][option]
        except KeyError:
            log.info('%r not found', (section, option))
            continue

        if func == 'let':
            if (section, option) == ('global', 'commands'):
                assign_cmds(config, dest, value, dist)
                continue
            if (section, option) == ('global', 'compilers'):
                continue
            raise Exception('Not implemented')

        func(config, dest, value)

    return dest
Example #4
    def _get_dlls(self):
        # return a list of (FQ-in-name, relative-out-name) tuples.
        ret = []
        # the apr binaries.
        apr_bins = [libname + ".dll" for libname in extra_libs
                    if libname.startswith("libapr")]
        if get_svn_version() >= (1,5,0):
            # Since 1.5.0 these libraries became shared
            apr_bins += """libsvn_client-1.dll libsvn_delta-1.dll libsvn_diff-1.dll
                           libsvn_fs-1.dll libsvn_ra-1.dll libsvn_repos-1.dll
                           libsvn_subr-1.dll libsvn_wc-1.dll libsasl.dll""".split()
        if get_svn_version() >= (1,7,0):
            apr_bins += ["libdb48.dll"]
        else:
            apr_bins += ["libdb44.dll"]
        apr_bins += """intl3_svn.dll libeay32.dll ssleay32.dll""".split()
        look_dirs = os.environ.get("PATH","").split(os.pathsep)
        look_dirs.insert(0, os.path.join(os.environ["SVN_DEV"], "bin"))

        target = os.path.abspath(os.path.join(self.build_lib, 'subvertpy'))
        for bin in apr_bins:
            for look in look_dirs:
                f = os.path.join(look, bin)
                if os.path.isfile(f):
                    ret.append((f, target))
                    break
            else:
                log.warn("Could not find required DLL %r to include", bin)
                log.debug("(looked in %s)", look_dirs)
        return ret
Example #5
File: util.py Project: johnnoone/cardhu
def register_custom_compilers(config):
    """Handle custom compilers; this has no real equivalent in distutils, where
    additional compilers could only be added programmatically, so we have to
    hack it in somehow.
    """

    try:
        compilers = config['global']['compilers']
    except KeyError:
        return

    import distutils.ccompiler

    compiler_class = distutils.ccompiler.compiler_class

    for compiler in compilers:
        compiler = load(compiler)

        name = getattr(compiler, 'name', compiler.__name__)
        desc = getattr(compiler, 'description', 'custom compiler %s' % name)
        module_name = compiler.__module__

        if name in compiler_class:
            log.warn('override %r compiler', name)
        compiler_class[name] = (module_name, compiler.__name__, desc)

        # Distutils assumes all compiler modules are in the distutils package
        sys.modules['distutils.' + module_name] = sys.modules[module_name]
Example #6
File: dir_util.py Project: d11/rts
def remove_tree(directory, verbose=1, dry_run=0):
    """Recursively remove an entire directory tree.

    Any errors are ignored (apart from being reported to stdout if 'verbose'
    is true).
    """
    from distutils.util import grok_environment_error
    global _path_created

    if verbose >= 1:
        log.info("removing '%s' (and everything under it)", directory)
    if dry_run:
        return
    cmdtuples = []
    _build_cmdtuple(directory, cmdtuples)
    for cmd in cmdtuples:
        try:
            cmd[0](cmd[1])
            # remove dir from cache if it's already there
            abspath = os.path.abspath(cmd[1])
            if abspath in _path_created:
                del _path_created[abspath]
        except (IOError, OSError) as exc:
            log.warn(grok_environment_error(
                    exc, "error removing %s: " % directory))
Example #7
    def run(self):
        _clean.run(self)

        import fnmatch

        # kill temporary files
        patterns = [
            # generic tempfiles
            '*~', '*.bak', '*.pyc',

            # tempfiles generated by ANTLR runs
            't[0-9]*Lexer.py', 't[0-9]*Parser.py',
            '*.tokens', '*__.g',
            ]

        for path in ('antlr3', 'unittests', 'tests'):
            path = os.path.join(os.path.dirname(__file__), path)
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path, topdown=True):
                    graveyard = []
                    for pat in patterns:
                        graveyard.extend(fnmatch.filter(files, pat))

                    for name in graveyard:
                        filePath = os.path.join(root, name)

                        try:
                            log.info("removing '%s'", filePath)
                            os.unlink(filePath)
                        except OSError, exc:
                            log.warn(
                                "Failed to delete '%s': %s",
                                filePath, exc
                                )
Example #8
def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff"""

    filename = os.path.join(base, name)
    if filename[:-1] in stubs:
        return True  # Extension module
    pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
    if sys.version_info < (3, 3):
        skip = 8  # skip magic & date
    else:
        skip = 12  # skip magic & date & file size
    f = open(filename, 'rb')
    f.read(skip)
    code = marshal.load(f)
    f.close()
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        for bad in [
            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
            'getinnerframes', 'getouterframes', 'stack', 'trace'
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    return safe
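The `iter_symbols` helper is not shown above; in setuptools it walks a code object and yields its names and string constants, recursing into nested code objects. A rough sketch of that behaviour (an approximation of the real helper):

def iter_symbols_sketch(code):
    """Yield names and string constants from a code object, recursively."""
    for name in code.co_names:
        yield name
    for const in code.co_consts:
        if isinstance(const, str):
            yield const
        elif hasattr(const, 'co_names'):
            for sym in iter_symbols_sketch(const):
                yield sym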
Example #9
def can_scan():
    if not sys.platform.startswith('java') and sys.platform != 'cli':
        # CPython, PyPy, etc.
        return True
    log.warn("Unable to analyze compiled code on this platform.")
    log.warn("Please ask the author to include a 'zip_safe'"
             " setting (either True or False) in the package's setup.py")
Example #10
def _remove_flat_installation(placeholder):
    if not os.path.isdir(placeholder):
        log.warn('Unknown installation at %s', placeholder)
        return False
    found = False
    for file in os.listdir(placeholder):
        if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
            found = True
            break
    if not found:
        log.warn('Could not locate setuptools*.egg-info')
        return

    log.warn('Moving elements out of the way...')
    pkg_info = os.path.join(placeholder, file)
    if os.path.isdir(pkg_info):
        patched = _patch_egg_dir(pkg_info)
    else:
        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)

    if not patched:
        log.warn('%s already patched.', pkg_info)
        return False
    # now let's move the files out of the way
    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
        element = os.path.join(placeholder, element)
        if os.path.exists(element):
            _rename_path(element)
        else:
            log.warn('Could not find the %s element of the '
                     'Setuptools distribution', element)
    return True
Example #11
def _create_fake_setuptools_pkg_info(placeholder):
    if not placeholder or not os.path.exists(placeholder):
        log.warn('Could not find the install location')
        return
    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
            (SETUPTOOLS_FAKED_VERSION, pyver)
    pkg_info = os.path.join(placeholder, setuptools_file)
    if os.path.exists(pkg_info):
        log.warn('%s already exists', pkg_info)
        return

    log.warn('Creating %s', pkg_info)
    try:
        f = open(pkg_info, 'w')
    except EnvironmentError:
        log.warn("Don't have permissions to write %s, skipping", pkg_info)
        return
    try:
        f.write(SETUPTOOLS_PKG_INFO)
    finally:
        f.close()

    pth_file = os.path.join(placeholder, 'setuptools.pth')
    log.warn('Creating %s', pth_file)
    f = open(pth_file, 'w')
    try:
        f.write(os.path.join(os.curdir, setuptools_file))
    finally:
        f.close()
Example #12
def download_setuptools(
    version=DEFAULT_VERSION,
    download_base=DEFAULT_URL,
    to_dir=os.curdir,
    delay=15,
    downloader_factory=get_best_downloader,
):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.

    ``downloader_factory`` should be a function taking no arguments and
    returning a function for downloading a URL to a target.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    tgz_name = "setuptools-%s.tar.gz" % version
    url = download_base + tgz_name
    saveto = os.path.join(to_dir, tgz_name)
    if not os.path.exists(saveto):  # Avoid repeated downloads
        log.warn("Downloading %s", url)
        downloader = downloader_factory()
        downloader(url, saveto)
    return os.path.realpath(saveto)
Example #13
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15):
    """Download distribute from a specified location and return its filename

    `version` should be a valid distribute version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen
    tgz_name = "distribute-%s.tar.gz" % version
    url = download_base + tgz_name
    saveto = os.path.join(to_dir, tgz_name)
    src = dst = None
    if not os.path.exists(saveto):  # Avoid repeated downloads
        try:
            log.warn("Downloading %s", url)
            src = urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = src.read()
            dst = open(saveto, "wb")
            dst.write(data)
        finally:
            if src:
                src.close()
            if dst:
                dst.close()
    return os.path.realpath(saveto)
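Combined with the `_install` helper from example #2, the usual entry point for these bootstrap scripts looks roughly like this (a sketch reusing the two functions shown above, not the scripts' exact code):

import sys

def main():
    tarball = download_setuptools()   # fetch the tarball into the current directory
    return _install(tarball) or 0     # propagate exit code 2 on failure

if __name__ == '__main__':
    sys.exit(main())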
Example #14
File: setup.py Project: iquaba/salt
    def finalize_options(self):
        install.finalize_options(self)

        logged_warnings = False
        for optname in ('root_dir', 'config_dir', 'cache_dir', 'sock_dir',
                        'srv_root_dir', 'base_file_roots_dir',
                        'base_pillar_roots_dir', 'base_master_roots_dir',
                        'logs_dir', 'pidfile_dir'):
            optvalue = getattr(self, 'salt_{0}'.format(optname))
            if optvalue is not None:
                dist_opt_value = getattr(self.distribution, 'salt_{0}'.format(optname))
                logged_warnings = True
                log.warn(
                    'The \'--salt-{0}\' setting is now a global option just pass it '
                    'right after \'setup.py\'. This install setting will still work '
                    'until Salt Boron but please migrate to the global setting as '
                    'soon as possible.'.format(
                        optname.replace('_', '-')
                    )

                )
                if dist_opt_value is not None:
                    raise DistutilsArgError(
                        'The \'--salt-{0}\' setting was passed as a global option '
                        'and as an option to the install command. Please only pass '
                        'one of them, preferably the global option since the other '
                        'is now deprecated and will be removed in Salt Boron.'.format(
                            optname.replace('_', '-')
                        )
                    )
                setattr(self.distribution, 'salt_{0}'.format(optname), optvalue)

        if logged_warnings is True:
            time.sleep(3)
Example #15
    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = _build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + "."):
                break
        else:
            return init_py

        f = open(init_py, "rU")
        if "declare_namespace" not in f.read():
            from distutils import log

            log.warn(
                "WARNING: %s is a namespace package, but its __init__.py does\n"
                "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n"
                '(See the setuptools manual under "Namespace Packages" for '
                "details.)\n",
                package,
            )
        f.close()
        return init_py
Example #16
    def run(self):
        # remove the build/temp.<plat> directory (unless it's already
        # gone)
        if os.path.exists(self.build_temp):
            remove_tree(self.build_temp, dry_run=self.dry_run)
        else:
            log.debug("'%s' does not exist -- can't clean it",
                      self.build_temp)

        if self.all:
            # remove build directories
            for directory in (self.build_lib,
                              self.bdist_base,
                              self.build_scripts):
                if os.path.exists(directory):
                    remove_tree(directory, dry_run=self.dry_run)
                else:
                    log.warn("'%s' does not exist -- can't clean it",
                             directory)

        # just for the heck of it, try to remove the base build directory:
        # we might have emptied it right now, but if not we don't care
        if not self.dry_run:
            try:
                os.rmdir(self.build_base)
                log.info("removing '%s'", self.build_base)
            except OSError:
                pass
Example #17
 def filter_open_files(self, victim):
     # Check for file locks
     for pid, filename in self.open_files:
         if victim in filename:
             log.warn("Can't delete %s, locked by pid : %s" % (victim, pid))
             return False
     return True
Example #18
File: setup.py Project: rezib/shinken
    def run(self):
        # If we are just doing an update, pass this
        if is_update:
            return
        #log.warn('>>> %s', self.lib)
        log.warn('>>> %s', self.etc_path)
        if not self.skip_build:
            self.run_command('build_config')
        etc_path = self.etc_path
        if self.root:
            etc_path = change_root(self.root, self.etc_path)
        self.outfiles = self.copy_tree(self.build_dir, etc_path)

        # if root is set, it's for a package, so NO chown
        if pwd and not self.root:
            # assume a posix system
            uid = self.get_uid(self.owner)
            gid = self.get_gid(self.group)
            for file in self.get_outputs():
                log.info("Changing owner of %s to %s:%s", file, self.owner, self.group)
                if not self.dry_run:
                    os.chown(file, uid, gid)
            # recursively changing permissions for etc/shinken and var/lib/shinken
            self.recursive_chown(self.etc_path, uid, gid, self.owner, self.group)
            self.recursive_chown(self.var_path, uid, gid, self.owner, self.group)
            self.recursive_chown(self.run_path, uid, gid, self.owner, self.group)
            self.recursive_chown(self.log_path, uid, gid, self.owner, self.group)
Example #19
def _get_version_py_str(packagename, version, release, debug, uses_git=True):
    timestamp = str(datetime.datetime.now())
    major, minor, bugfix = _version_split(version)

    if packagename.lower() == 'astropy':
        packagename = 'Astropy'
    else:
        packagename = 'Astropy-affiliated package ' + packagename

    if uses_git:
        loader = pkgutil.get_loader(git_helpers)
        source_lines = (loader.get_source() or '').splitlines()
        if not source_lines:
            log.warn('Cannot get source code for astropy_helpers.git_helpers; '
                     'git support disabled.')
            return _get_version_py_str(packagename, version, release, debug,
                                       uses_git=False)
        idx = 0
        for idx, line in enumerate(source_lines):
            if line.startswith('# BEGIN'):
                break
        git_helpers_py = '\n'.join(source_lines[idx + 1:])
        header = _FROZEN_VERSION_PY_WITH_GIT_HEADER.format(
                git_helpers=git_helpers_py,
                verstr=version)
    else:
        header = 'version = {0!r}'.format(version)

    return _FROZEN_VERSION_PY_TEMPLATE.format(packagename=packagename,
                                              timestamp=timestamp,
                                              header=header,
                                              major=major,
                                              minor=minor,
                                              bugfix=bugfix,
                                              rel=release, debug=debug)
Example #20
def get_extensions():

    med_sources = [str(os.path.join(UTIL_DIR, "median_utils.pyx")),
                   str(os.path.join(UTIL_DIR, "quick_select.c"))]

    include_dirs = ['numpy', UTIL_DIR]

    libraries = []

    ext_med = Extension(name=str('banzai.utils.median_utils'),
                        sources=med_sources,
                        include_dirs=include_dirs,
                        libraries=libraries,
                        language="c",
                        extra_compile_args=['-g', '-O3', '-funroll-loops', '-ffast-math'])

    has_openmp, outputs = check_openmp()
    if has_openmp:
        if setup_helpers.get_compiler_option() == 'msvc':
            ext_med.extra_compile_args.append('-openmp')
        else:
            ext_med.extra_compile_args.append('-fopenmp')
            ext_med.extra_link_args = ['-g', '-fopenmp']
    else:
        log.warn('OpenMP was not found. '
                 'banzai will be compiled without OpenMP. '
                 '(Use the "-v" option of setup.py for more details.)')
        log.debug(('(Start of OpenMP info)\n'
                   'compiler stdout:\n{0}\n'
                   'compiler stderr:\n{1}\n'
                   '(End of OpenMP info)').format(*outputs))

    return [ext_med]
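The `check_openmp` helper is not shown above; the usual approach is a throw-away test compile of a trivial OpenMP program. One possible shape for such a check (an assumption about the helper, not the project's actual code):

import os
import subprocess
import tempfile

def check_openmp_sketch(compiler='cc'):
    """Return (has_openmp, (stdout, stderr)) from a trivial -fopenmp test compile."""
    src = '#include <omp.h>\nint main(void) { return omp_get_max_threads() > 0 ? 0 : 1; }\n'
    with tempfile.TemporaryDirectory() as tmp:
        c_file = os.path.join(tmp, 'test_openmp.c')
        with open(c_file, 'w') as fh:
            fh.write(src)
        proc = subprocess.run([compiler, '-fopenmp', c_file, '-o', os.path.join(tmp, 'a.out')],
                              capture_output=True, text=True)
    return proc.returncode == 0, (proc.stdout, proc.stderr)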
Example #21
File: dist.py Project: relsi/kiwi
def compile_po_files(domain, dirname='locale'):
    """
    Compiles po files to mo files.
    Note. this function depends on gettext utilities being installed

    :param domain: gettext domain
    :param dirname: base directory
    :returns: a list of po files
    """
    data_files = []
    for po in listfiles('po', '*.po'):
        lang = os.path.basename(po[:-3])
        mo = os.path.join(dirname, lang, 'LC_MESSAGES', domain + '.mo')

        if not os.path.exists(mo) or newer(po, mo):
            directory = os.path.dirname(mo)
            if not os.path.exists(directory):
                info("creating %s" % directory)
                os.makedirs(directory)
            try:
                p = subprocess.Popen(['msgfmt', '-o', mo, po],
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
            except OSError:
                warn('msgfmt is missing, not installing translations')
                return []
            info('compiled %s -> %s' % (po, mo))
            p.communicate()

        dest = os.path.dirname(os.path.join('share', mo))
        data_files.append((dest, [mo]))

    return data_files
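In a setup.py this helper is typically fed straight into `data_files`; a minimal sketch (the 'kiwi' domain is only an illustrative value):

from distutils.core import setup

setup(
    name='example',
    version='0.1',
    # e.g. [('share/locale/de/LC_MESSAGES', ['locale/de/LC_MESSAGES/kiwi.mo']), ...]
    data_files=compile_po_files('kiwi'),
)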
Example #22
    def get_local_directory_dist(self):
        """
        Handle importing a vendored package from a subdirectory of the source
        distribution.
        """

        if not os.path.isdir(self.path):
            return

        log.info('Attempting to import astropy_helpers from {0} {1!r}'.format(
                 'submodule' if self.is_submodule else 'directory',
                 self.path))

        dist = self._directory_import()

        if dist is None:
            log.warn(
                'The requested path {0!r} for importing {1} does not '
                'exist, or does not contain a copy of the {1} '
                'package.'.format(self.path, PACKAGE_NAME))
        elif self.auto_upgrade and not self.is_submodule:
            # A version of astropy-helpers was found on the available path, but
            # check to see if a bugfix release is available on PyPI
            upgrade = self._do_upgrade(dist)
            if upgrade is not None:
                dist = upgrade

        return dist
Example #23
def download_setuptools(version, download_base, to_dir):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    tgz_name = "setuptools-%s.tar.gz" % version
    url = download_base + tgz_name
    saveto = os.path.join(to_dir, tgz_name)
    src = dst = None
    if not os.path.exists(saveto):  # Avoid repeated downloads
        try:
            log.warn("Downloading %s", url)
            src = urllib2.urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = src.read()
            dst = open(saveto, "wb")
            dst.write(data)
        finally:
            if src:
                src.close()
            if dst:
                dst.close()
    return os.path.realpath(saveto)
Example #24
    def walk(filename, visitor):
        """Generate an AST for the given filename and walk over it using
        the given visitor instance.
        """

        filename = os.path.abspath(filename)

        try:
            tree = ast.parse(open(filename, 'r').read())
        except SyntaxError:
            if sys.version_info[0] < 3:
                e = sys.exc_info()[1]
                log.warn('SyntaxError while parsing file %s: %s' %
                         (filename, str(e)))
                return
            # We're probably in Python 3 and looking at a file intended for
            # Python 2.  Otherwise there's an unintended SyntaxError in the
            # file, so there are bigger problems anyways
            try:
                import lib2to3.refactor

                tool = StringRefactoringTool(
                    lib2to3.refactor.get_fixers_from_package('lib2to3.fixes'))
                tool.refactor_file(filename, write=True)
                tree = ast.parse(tool.refactored[filename])
            except ImportError:
                # Without 2to3 we can't do much more.
                # TODO: Issue a warning?
                return

        visitor.visit(tree)
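A minimal visitor to pair with this `walk` helper might look like the following (a sketch; `StringRefactoringTool` above is project-specific and not reproduced here):

import ast

class ImportReporter(ast.NodeVisitor):
    """Toy visitor: report every module imported by the scanned file."""

    def visit_Import(self, node):
        for alias in node.names:
            print('imports', alias.name)
        self.generic_visit(node)

    def visit_ImportFrom(self, node):
        print('imports from', node.module)
        self.generic_visit(node)

# hypothetical usage:
# walk('some_module.py', ImportReporter())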
Example #25
    def make_release_tree(self, base_dir, files):
        """Create the directory tree that will become the source
        distribution archive.  All directories implied by the filenames in
        'files' are created under 'base_dir', and then we hard link or copy
        (if hard linking is unavailable) those files into place.
        Essentially, this duplicates the developer's source tree, but in a
        directory named after the distribution, containing only the files
        to be distributed.
        """
        self.mkpath(base_dir)
        dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
        if hasattr(os, 'link'):
            link = 'hard'
            msg = 'making hard links in %s...' % base_dir
        else:
            link = None
            msg = 'copying files to %s...' % base_dir
        if not files:
            log.warn('no files to distribute -- empty manifest?')
        else:
            log.info(msg)
        for file in files:
            if not os.path.isfile(file):
                log.warn("'%s' not a regular file -- skipping" % file)
            else:
                dest = os.path.join(base_dir, file)
                self.copy_file(file, dest, link=link)

        self.distribution.metadata.write_pkg_info(base_dir)
        return
Example #26
  def run(self):
    f = None
    self.ensure_filename('manifest')
    try:
      try:
        if not self.manifest:
            raise DistutilsFileError("Pass manifest with --manifest=file")
        f = open(self.manifest)
        files = [file.strip() for file in f]
      except IOError, e:
        raise DistutilsFileError("unable to open install manifest: %s", str(e))
    finally:
      if f:
        f.close()

    for file in files:
      if os.path.isfile(file) or os.path.islink(file):
        info("removing %s" % repr(file))
        if not self.dry_run:
          try:
            os.unlink(file)
          except OSError, e:
            warn("could not delete: %s" % repr(file))
      elif not os.path.isdir(file):
        info("skipping %s" % repr(file))
Example #27
def _install(tarball, install_args=()):
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn("Extracting in %s", tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn("Now working in %s", subdir)

        # installing
        log.warn("Installing Distribute")
        if not _python_cmd("setup.py", "install", *install_args):
            log.warn("Something went wrong during the installation.")
            log.warn("See the error message above.")
            # exitcode will be 2
            return 2
    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
Example #28
File: build_py.py Project: B-Rich/breve
    def check_package (self, package, package_dir):

        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise DistutilsFileError, \
                      "package directory '%s' does not exist" % package_dir
            if not os.path.isdir(package_dir):
                raise DistutilsFileError, \
                      ("supposed package directory '%s' exists, " +
                       "but is not a directory") % package_dir

        # Require __init__.py for all but the "root package"
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py
            else:
                log.warn(("package init file '%s' not found " +
                          "(or not a regular file)"), init_py)

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None
Example #29
def download_setuptools(packagename, to_dir):
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen

    chksum, url = get_pypi_src_download(packagename)
    tgz_name = os.path.basename(url)
    saveto = os.path.join(to_dir, tgz_name)

    src = dst = None
    if not os.path.exists(saveto):  # Avoid repeated downloads
        try:
            log.warn("Downloading %s", url)
            src = urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = src.read()

            if chksum is not None:
                data_sum = md5(data).hexdigest()
                if data_sum != chksum:
                    raise RuntimeError("Downloading %s failed: corrupt checksum"%(url,))


            dst = open(saveto, "wb")
            dst.write(data)
        finally:
            if src:
                src.close()
            if dst:
                dst.close()
    return os.path.realpath(saveto)
Example #30
    def get_local_file_dist(self):
        """
        Handle importing from a source archive; this also uses setup_requires
        but points easy_install directly to the source archive.
        """

        if not os.path.isfile(self.path):
            return

        log.info('Attempting to unpack and import astropy_helpers from '
                 '{0!r}'.format(self.path))

        try:
            dist = self._do_download(find_links=[self.path])
        except Exception as e:
            if DEBUG:
                raise

            log.warn(
                'Failed to import {0} from the specified archive {1!r}: '
                '{2}'.format(PACKAGE_NAME, self.path, str(e)))
            dist = None

        if dist is not None and self.auto_upgrade:
            # A version of astropy-helpers was found on the available path, but
            # check to see if a bugfix release is available on PyPI
            upgrade = self._do_upgrade(dist)
            if upgrade is not None:
                dist = upgrade

        return dist
Example #31
def _darwin_compiler_fixup(compiler_so, cc_args):
    """
    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one of them in extra_compile_flags.

    This is needed because '-arch ARCH' adds another architecture to the
    build, without a way to remove an architecture. Furthermore GCC will
    barf if multiple '-isysroot' arguments are present.
    
    Robin: I've further modified our copy of this function to check if there
    is a -isysroot flag in the CC/CXX values in the environment. If so then we
    want to make sure that we keep that one and strip the others, instead of
    stripping it and leaving Python's.
    """

    ccHasSysroot = '-isysroot' in os.environ.get('CC', '') \
                 or '-isysroot' in os.environ.get('CXX', '')

    stripArch = stripSysroot = 0

    compiler_so = list(compiler_so)
    kernel_version = os.uname()[2]  # 8.4.3
    major_version = int(kernel_version.split('.')[0])

    if major_version < 8:
        # OSX before 10.4.0, these don't support -arch and -isysroot at
        # all.
        stripArch = stripSysroot = True
    else:
        stripArch = '-arch' in cc_args
        stripSysroot = '-isysroot' in cc_args or stripArch or ccHasSysroot

    if stripArch:
        while 1:
            try:
                index = compiler_so.index('-arch')
                # Strip this argument and the next one:
                del compiler_so[index:index + 2]
            except ValueError:
                break

    if stripSysroot:
        try:
            index = 0
            if ccHasSysroot:
                index = compiler_so.index('-isysroot') + 1
            index = compiler_so.index('-isysroot', index)
            # Strip this argument and the next one:
            del compiler_so[index:index + 2]
        except ValueError:
            pass

    # Check if the SDK that is used during compilation actually exists,
    # the universal build requires the usage of a universal SDK and not all
    # users have that installed by default.
    sysroot = None
    if '-isysroot' in cc_args:
        idx = cc_args.index('-isysroot')
        sysroot = cc_args[idx + 1]
    elif '-isysroot' in compiler_so:
        idx = compiler_so.index('-isysroot')
        sysroot = compiler_so[idx + 1]

    if sysroot and not os.path.isdir(sysroot):
        from distutils import log
        log.warn("Compiling with an SDK that doesn't seem to exist: %s",
                 sysroot)
        log.warn("Please check your Xcode installation")

    return compiler_so
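A quick illustration of the stripping behaviour (the flag values below are made up; this assumes a kernel newer than 8.x and no '-isysroot' in the CC/CXX environment variables):

compiler_so = ['gcc', '-arch', 'i386', '-arch', 'x86_64',
               '-isysroot', '/Developer/SDKs/MacOSX10.6.sdk', '-O2']
cc_args = ['-arch', 'x86_64']  # the extension itself asked for a single arch

# _darwin_compiler_fixup(compiler_so, cc_args) would return roughly
# ['gcc', '-O2']: every '-arch ARCH' pair and the '-isysroot PATH' pair
# are stripped, and no SDK-existence warning fires because no '-isysroot'
# remains in either argument list.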
Example #32
def _after_install(dist):
    log.warn('After install bootstrap.')
    placeholder = dist.get_command_obj('install').install_purelib
    _create_fake_setuptools_pkg_info(placeholder)
Example #33
def _rename_path(path):
    new_name = path + '.OLD.%s' % time.time()
    log.warn('Renaming %s into %s', path, new_name)
    os.rename(path, new_name)
    return new_name
Example #34
 def warn(self, msg, *args):
     log.warn(msg, *args)
Example #35
File: setup.py Project: woakesd/azure-cli
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from codecs import open
from setuptools import setup, find_packages
import sys

try:
    from azure_cli_bdist_wheel import cmdclass
except ImportError:
    from distutils import log as logger

    logger.warn("Wheel is not available, disabling bdist_wheel hook")
    cmdclass = {}

VERSION = "2.37.0"
# If we have source, validate that our version numbers match
# This should prevent uploading releases with mismatched versions.
try:
    with open('azure/cli/__main__.py', 'r', encoding='utf-8') as f:
        content = f.read()
except OSError:
    pass
else:
    import re

    m = re.search(r'__version__\s*=\s*[\'"](.+?)[\'"]', content)
    if not m:
Example #36
    def process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        action, patterns, dir, dir_pattern = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=1):
                    log.warn("warning: no files found matching '%s'", pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=1):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=0):
                    log.warn(("warning: no files found matching '%s' " +
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=0):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"), pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.include_pattern(pattern, prefix=dir):
                    log.warn(("warning: no files found matching '%s' " +
                              "under directory '%s'"), pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"), pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.include_pattern(None, prefix=dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.exclude_pattern(None, prefix=dir_pattern):
                log.warn(("no previously-included directories found " +
                          "matching '%s'"), dir_pattern)
        else:
            raise DistutilsInternalError, \
                  "this cannot happen: invalid action '%s'" % action
Example #37
    def _check_submodule_using_git(self):
        """
        Check if the given path is a git submodule.  If so, attempt to initialize
        and/or update the submodule if needed.

        This function makes calls to the ``git`` command in subprocesses.  The
        ``_check_submodule_no_git`` option uses pure Python to check if the given
        path looks like a git submodule, but it cannot perform updates.
        """

        cmd = ['git', 'submodule', 'status', '--', self.path]

        try:
            log.info('Running `{0}`; use the --no-git option to disable git '
                     'commands'.format(' '.join(cmd)))
            returncode, stdout, stderr = run_cmd(cmd)
        except _CommandNotFound:
            # The git command simply wasn't found; this is most likely the
            # case on user systems that don't have git and are simply
            # trying to install the package from PyPI or a source
            # distribution.  Silently ignore this case and simply don't try
            # to use submodules
            return False

        stderr = stderr.strip()

        if returncode != 0 and stderr:
            # Unfortunately the return code alone cannot be relied on, as
            # earlier versions of git returned 0 even if the requested submodule
            # does not exist

            # This is a warning that occurs in perl (from running git submodule)
            # which only occurs with a malformatted locale setting which can
            # happen sometimes on OSX.  See again
            # https://github.com/astropy/astropy/issues/2749
            perl_warning = (
                'perl: warning: Falling back to the standard locale '
                '("C").')
            if not stderr.strip().endswith(perl_warning):
                # Some other unknown error condition occurred
                log.warn('git submodule command failed '
                         'unexpectedly:\n{0}'.format(stderr))
                return False

        # Output of `git submodule status` is as follows:
        #
        # 1: Status indicator: '-' for submodule is uninitialized, '+' if
        # submodule is initialized but is not at the commit currently indicated
        # in .gitmodules (and thus needs to be updated), or 'U' if the
        # submodule is in an unstable state (i.e. has merge conflicts)
        #
        # 2. SHA-1 hash of the current commit of the submodule (we don't really
        # need this information but it's useful for checking that the output is
        # correct)
        #
        # 3. The output of `git describe` for the submodule's current commit
        # hash (this includes for example what branches the commit is on) but
        # only if the submodule is initialized.  We ignore this information for
        # now
        _git_submodule_status_re = re.compile(
            r'^(?P<status>[+-U ])(?P<commit>[0-9a-f]{40}) '
            r'(?P<submodule>\S+)( .*)?$')

        # The stdout should only contain one line--the status of the
        # requested submodule
        m = _git_submodule_status_re.match(stdout)
        if m:
            # Yes, the path *is* a git submodule
            self._update_submodule(m.group('submodule'), m.group('status'))
            return True
        else:
            log.warn('Unexpected output from `git submodule status`:\n{0}\n'
                     'Will attempt import from {1!r} regardless.'.format(
                         stdout, self.path))
            return False
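For reference, the status regex above matches `git submodule status` output lines shaped like the following (the hash and path here are made up):

import re

_git_submodule_status_re = re.compile(
    r'^(?P<status>[+-U ])(?P<commit>[0-9a-f]{40}) '
    r'(?P<submodule>\S+)( .*)?$')

sample = ' ' + 'a' * 40 + ' astropy_helpers (v1.0.2)'
m = _git_submodule_status_re.match(sample)
print(m.group('status'), m.group('submodule'))  # status is ' ' (initialized), submodule is 'astropy_helpers'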
Example #38
def _fake_setuptools():
    log.warn('Scanning installed packages')
    try:
        import pkg_resources
    except ImportError:
        # we're cool
        log.warn('Setuptools or Distribute does not seem to be installed.')
        return
    ws = pkg_resources.working_set
    try:
        setuptools_dist = ws.find(
            pkg_resources.Requirement.parse('setuptools', replacement=False)
            )
    except TypeError:
        # old distribute API
        setuptools_dist = ws.find(
            pkg_resources.Requirement.parse('setuptools')
        )

    if setuptools_dist is None:
        log.warn('No setuptools distribution found')
        return
    # detecting if it was already faked
    setuptools_location = setuptools_dist.location
    log.warn('Setuptools installation detected at %s', setuptools_location)

    # if --root or --prefix was provided, and if
    # setuptools is not located in them, we don't patch it
    if not _under_prefix(setuptools_location):
        log.warn('Not patching, --root or --prefix is installing Distribute'
                 ' in another location')
        return

    # let's see if its an egg
    if not setuptools_location.endswith('.egg'):
        log.warn('Non-egg installation')
        res = _remove_flat_installation(setuptools_location)
        if not res:
            return
    else:
        log.warn('Egg installation')
        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
        if (os.path.exists(pkg_info) and
            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
            log.warn('Already patched.')
            return
        log.warn('Patching...')
        # let's create a fake egg replacing setuptools one
        res = _patch_egg_dir(setuptools_location)
        if not res:
            return
    log.warn('Patching complete.')
    _relaunch()
Example #39
File: cmd.py Project: souto2001/keylogger
 def warn(self, msg):
     log.warn("warning: %s: %s\n", self.get_command_name(), msg)
Example #40
File: build_py.py Project: bmaggi/Topcased
 def check_module (self, module, module_file):
     if not os.path.isfile(module_file):
         log.warn("file %s (for module %s) not found", module_file, module)
         return 0
     else:
         return 1
Example #41
                info("skipping %s" % repr(file))
        dirs = set()
        for file in reversed(sorted(files)):
            dir = os.path.dirname(file)
            if dir not in dirs and os.path.isdir(dir) and len(
                    os.listdir(dir)) == 0:
                dirs.add(dir)
                # Only nuke empty Python library directories, else we could destroy
                # e.g. locale directories we're the only app with a .mo installed for.
                if dir.find("site-packages/") > 0:
                    info("removing %s" % repr(dir))
                    if not self.dry_run:
                        try:
                            os.rmdir(dir)
                        except OSError, e:
                            warn("could not remove directory: %s" % str(e))
                else:
                    info("skipping empty directory %s" % repr(dir))


class Install(install):
    def run(self):
        self.distribution.scripts = ['caja-pyextensions']
        install.run(self)


class InstallData(install_data):
    def run(self):
        self.data_files.extend(self._find_mo_files())
        self.data_files.extend(self._find_desktop_file())
        install_data.run(self)
Example #42
def _before_install():
    log.warn('Before install bootstrap.')
    _fake_setuptools()
Example #43
 def _update_icon_cache(self):
   info("running gtk-update-icon-cache")
   try:
     subprocess.call(["gtk-update-icon-cache", "-q", "-f", "-t", os.path.join(self.install_dir, "share/icons/hicolor")])
   except Exception as e:
     warn("updating the GTK icon cache failed: %s" % str(e))
Example #44
File: setup.py Project: zubatyuk/torchani
import os
import glob
import subprocess
from setuptools import setup, find_packages
from distutils import log
import sys

BUILD_CUAEV = '--cuaev' in sys.argv
if BUILD_CUAEV:
    sys.argv.remove('--cuaev')

if not BUILD_CUAEV:
    log.warn("Will not install cuaev")  # type: ignore

with open("README.md", "r") as fh:
    long_description = fh.read()


def maybe_download_cub():
    import torch
    dirs = torch.utils.cpp_extension.include_paths(cuda=True)
    for d in dirs:
        cubdir = os.path.join(d, 'cub')
        log.info(f'Searching for cub at {cubdir}...')
        if os.path.isdir(cubdir):
            log.info(f'Found cub in {cubdir}')
            return []
    # if no cub, download it to include dir from github
    if not os.path.isdir('./include/cub'):
        if not os.path.exists('./include'):
            os.makedirs('include')
Example #45
    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir=None,
             libraries=None,
             library_dirs=None,
             runtime_library_dirs=None,
             export_symbols=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):

        # XXX this ignores 'build_temp'!  should follow the lead of
        # msvccompiler.py

        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        (libraries, library_dirs, runtime_library_dirs) = \
            self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)

        if runtime_library_dirs:
            log.warn("I don't know what to do with 'runtime_library_dirs': %s",
                     str(runtime_library_dirs))

        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):

            # Figure out linker args based on type of target.
            if target_desc == CCompiler.EXECUTABLE:
                startup_obj = 'c0w32'
                if debug:
                    ld_args = self.ldflags_exe_debug[:]
                else:
                    ld_args = self.ldflags_exe[:]
            else:
                startup_obj = 'c0d32'
                if debug:
                    ld_args = self.ldflags_shared_debug[:]
                else:
                    ld_args = self.ldflags_shared[:]

            # Create a temporary exports file for use by the linker
            if export_symbols is None:
                def_file = ''
            else:
                head, tail = os.path.split(output_filename)
                modname, ext = os.path.splitext(tail)
                temp_dir = os.path.dirname(
                    objects[0])  # preserve tree structure
                def_file = os.path.join(temp_dir, '%s.def' % modname)
                contents = ['EXPORTS']
                for sym in (export_symbols or []):
                    contents.append('  %s=_%s' % (sym, sym))
                self.execute(write_file, (def_file, contents),
                             "writing %s" % def_file)

            # Borland C++ has problems with '/' in paths
            objects2 = map(os.path.normpath, objects)
            # split objects in .obj and .res files
            # Borland C++ needs them at different positions in the command line
            objects = [startup_obj]
            resources = []
            for file in objects2:
                (base, ext) = os.path.splitext(os.path.normcase(file))
                if ext == '.res':
                    resources.append(file)
                else:
                    objects.append(file)

            for l in library_dirs:
                ld_args.append("/L%s" % os.path.normpath(l))
            ld_args.append("/L.")  # we sometimes use relative paths

            # list of object files
            ld_args.extend(objects)

            # XXX the command-line syntax for Borland C++ is a bit wonky;
            # certain filenames are jammed together in one big string, but
            # comma-delimited.  This doesn't mesh too well with the
            # Unix-centric attitude (with a DOS/Windows quoting hack) of
            # 'spawn()', so constructing the argument list is a bit
            # awkward.  Note that doing the obvious thing and jamming all
            # the filenames and commas into one argument would be wrong,
            # because 'spawn()' would quote any filenames with spaces in
            # them.  Arghghh!.  Apparently it works fine as coded...

            # name of dll/exe file
            ld_args.extend([',', output_filename])
            # no map file and start libraries
            ld_args.append(',,')

            for lib in libraries:
                # see if we find it and if there is a bcpp specific lib
                # (xxx_bcpp.lib)
                libfile = self.find_library_file(library_dirs, lib, debug)
                if libfile is None:
                    ld_args.append(lib)
                    # probably a BCPP internal library -- don't warn
                else:
                    # full name which prefers bcpp_xxx.lib over xxx.lib
                    ld_args.append(libfile)

            # some default libraries
            ld_args.append('import32')
            ld_args.append('cw32mt')

            # def file for export symbols
            ld_args.extend([',', def_file])
            # add resource files
            ld_args.append(',')
            ld_args.extend(resources)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg
Example #46
def customize_compiler(compiler):
    """Do any platform-specific customization of a CCompiler instance.

    Mainly needed on Unix, so we can plug in the information that
    varies across Unices and is stored in Python's Makefile.
    """
    if compiler.compiler_type == "unix":
        (cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \
            get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
                            'CCSHARED', 'LDSHARED', 'SO', 'AR',
                            'ARFLAGS')

        newcc = None
        if 'CC' in os.environ:
            newcc = os.environ['CC']
        elif sys.platform == 'darwin' and cc == 'gcc-4.2':
            # Issue #13590:
            #       Since Apple removed gcc-4.2 in Xcode 4.2, we can no
            #       longer assume it is available for extension module builds.
            #       If Python was built with gcc-4.2, check first to see if
            #       it is available on this system; if not, try to use clang
            #       instead unless the caller explicitly set CC.
            global _USE_CLANG
            if _USE_CLANG is None:
                from distutils import log
                from subprocess import Popen, PIPE
                p = Popen("! type gcc-4.2 && type clang && exit 2",
                          shell=True,
                          stdout=PIPE,
                          stderr=PIPE)
                p.wait()
                if p.returncode == 2:
                    _USE_CLANG = True
                    log.warn("gcc-4.2 not found, using clang instead")
                else:
                    _USE_CLANG = False
            if _USE_CLANG:
                newcc = 'clang'
        if newcc:
            # On OS X, if CC is overridden, use that as the default
            #       command for LDSHARED as well
            if (sys.platform == 'darwin' and 'LDSHARED' not in os.environ
                    and ldshared.startswith(cc)):
                ldshared = newcc + ldshared[len(cc):]
            cc = newcc
        if 'CXX' in os.environ:
            cxx = os.environ['CXX']
        if 'LDSHARED' in os.environ:
            ldshared = os.environ['LDSHARED']
        if 'CPP' in os.environ:
            cpp = os.environ['CPP']
        else:
            cpp = cc + " -E"  # not always
        if 'LDFLAGS' in os.environ:
            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
        if 'CFLAGS' in os.environ:
            cflags = opt + ' ' + os.environ['CFLAGS']
            ldshared = ldshared + ' ' + os.environ['CFLAGS']
        if 'CPPFLAGS' in os.environ:
            cpp = cpp + ' ' + os.environ['CPPFLAGS']
            cflags = cflags + ' ' + os.environ['CPPFLAGS']
            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
        if 'AR' in os.environ:
            ar = os.environ['AR']
        if 'ARFLAGS' in os.environ:
            archiver = ar + ' ' + os.environ['ARFLAGS']
        else:
            archiver = ar + ' ' + ar_flags

        cc_cmd = cc + ' ' + cflags
        compiler.set_executables(preprocessor=cpp,
                                 compiler=cc_cmd,
                                 compiler_so=cc_cmd + ' ' + ccshared,
                                 compiler_cxx=cxx,
                                 linker_so=ldshared,
                                 linker_exe=cc,
                                 archiver=archiver)

        compiler.shared_lib_extension = so_ext
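A minimal usage sketch of the function above (standard distutils calls only, nothing project-specific assumed): it is normally applied to a freshly created compiler object before any compile step.

from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler

compiler = new_compiler()        # default compiler class for this platform
customize_compiler(compiler)     # on Unix, plugs in CC/CFLAGS/LDSHARED/AR from
                                 # Python's build Makefile and the environment
if compiler.compiler_type == "unix":
    print(compiler.compiler_so)  # e.g. ['gcc', '-O2', ..., '-fPIC'] on Linux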
예제 #47
0
def _relaunch():
    log.warn('Relaunching...')
    # we have to relaunch the process
    args = [sys.executable] + sys.argv
    sys.exit(subprocess.call(args))
예제 #48
0
파일: setup.py 프로젝트: ocefpaf/Fiona
                libraries.append(item[2:])
            else:
                # e.g. -framework GDAL
                extra_link_args.append(item)
        gdalversion = gdal_output[3]
        if gdalversion:
            log.info("GDAL API version obtained from gdal-config: %s",
                     gdalversion)

    except Exception as e:
        if os.name == "nt":
            log.info("Building on Windows requires extra options to setup.py "
                     "to locate needed GDAL files.\nMore information is "
                     "available in the README.")
        else:
            log.warn("Failed to get options via gdal-config: %s", str(e))

    # Get GDAL API version from environment variable.
    if 'GDAL_VERSION' in os.environ:
        gdalversion = os.environ['GDAL_VERSION']
        log.info("GDAL API version obtained from environment: %s", gdalversion)

    # Get GDAL API version from the command line if specified there.
    if '--gdalversion' in sys.argv:
        index = sys.argv.index('--gdalversion')
        sys.argv.pop(index)
        gdalversion = sys.argv.pop(index)
        log.info("GDAL API version obtained from command line option: %s",
                 gdalversion)

    if not gdalversion:
예제 #49
0
 def zip_safe(self):
     safe = getattr(self.distribution, 'zip_safe', None)
     if safe is not None:
         return safe
     log.warn("zip_safe flag not set; analyzing archive contents...")
     return analyze_egg(self.bdist_dir, self.stubs)
예제 #50
0
    def check_extensions_list(self, extensions):
        """Ensure that the list of extensions (presumably provided as a
        command option 'extensions') is valid, i.e. it is a list of
        Extension objects.  We also support the old-style list of 2-tuples,
        where the tuples are (ext_name, build_info), which are converted to
        Extension instances here.

        Raise DistutilsSetupError if the structure is invalid anywhere;
        just returns otherwise.
        """
        if not isinstance(extensions, list):
            raise DistutilsSetupError(
                "'ext_modules' option must be a list of Extension instances")

        for i, ext in enumerate(extensions):
            if isinstance(ext, Extension):
                continue  # OK! (assume type-checking done
                          # by Extension constructor)

            if not isinstance(ext, tuple) or len(ext) != 2:
                raise DistutilsSetupError(
                    "each element of 'ext_modules' option must be an "
                    "Extension instance or 2-tuple")

            (ext_name, build_info) = ext
            log.warn("old-style (ext_name, build_info) tuple found in "
                     "ext_modules for extension '%s' "
                     "-- please convert to Extension instance" % ext_name)

            if not (isinstance(ext_name, str)
                    and extension_name_re.match(ext_name)):
                raise DistutilsSetupError(
                    "first element of each tuple in 'ext_modules' "
                    "must be the extension name (a string)")

            if not isinstance(build_info, dict):
                raise DistutilsSetupError(
                    "second element of each tuple in 'ext_modules' "
                    "must be a dictionary (build info)")

            # OK, the (ext_name, build_info) tuple is type-safe: convert it
            # to an Extension instance.
            ext = Extension(ext_name, build_info['sources'])

            # Easy stuff: one-to-one mapping from dict elements to
            # instance attributes.
            for key in ('include_dirs', 'library_dirs', 'libraries',
                        'extra_objects', 'extra_compile_args',
                        'extra_link_args'):
                val = build_info.get(key)
                if val is not None:
                    setattr(ext, key, val)

            # Medium-easy stuff: same syntax/semantics, different names.
            ext.runtime_library_dirs = build_info.get('rpath')
            if 'def_file' in build_info:
                log.warn("'def_file' element of build info dict "
                         "no longer supported")

            # Non-trivial stuff: 'macros' split into 'define_macros'
            # and 'undef_macros'.
            macros = build_info.get('macros')
            if macros:
                ext.define_macros = []
                ext.undef_macros = []
                for macro in macros:
                    if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                        raise DistutilsSetupError(
                            "'macros' element of build info dict "
                            "must be 1- or 2-tuple")
                    if len(macro) == 1:
                        ext.undef_macros.append(macro[0])
                    elif len(macro) == 2:
                        ext.define_macros.append(macro)

            extensions[i] = ext
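For reference, a minimal sketch of the two input styles this method accepts (module and source names are hypothetical):

from distutils.core import Extension

ext_modules = [
    # preferred: a real Extension instance, passed through unchanged
    Extension("pkg.fast", sources=["pkg/fast.c"]),
    # old-style 2-tuple: converted to an Extension and a warning is logged
    ("pkg.legacy", {"sources": ["pkg/legacy.c"],
                    "include_dirs": ["include"],
                    "macros": [("HAVE_FOO", "1"), ("NDEBUG",)]}),
]

After check_extensions_list(ext_modules) runs, the second entry is replaced in place by an equivalent Extension instance.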
예제 #51
0
def _darwin_compiler_fixup(compiler_so, cc_args):
    """
    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one of them in extra_compile_args.

    This is needed because '-arch ARCH' adds another architecture to the
    build, without a way to remove an architecture. Furthermore GCC will
    barf if multiple '-isysroot' arguments are present.
    """
    stripArch = stripSysroot = 0

    compiler_so = list(compiler_so)
    kernel_version = os.uname()[2]  # 8.4.3
    major_version = int(kernel_version.split('.')[0])

    if major_version < 8:
        # OS X releases before 10.4.0 don't support -arch and -isysroot
        # at all.
        stripArch = stripSysroot = True
    else:
        stripArch = '-arch' in cc_args
        stripSysroot = '-isysroot' in cc_args

    if stripArch or 'ARCHFLAGS' in os.environ:
        while 1:
            try:
                index = compiler_so.index('-arch')
                # Strip this argument and the next one:
                del compiler_so[index:index + 2]
            except ValueError:
                break

    if 'ARCHFLAGS' in os.environ and not stripArch:
        # User specified different -arch flags in the environ,
        # see also distutils.sysconfig
        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()

    if stripSysroot:
        try:
            index = compiler_so.index('-isysroot')
            # Strip this argument and the next one:
            del compiler_so[index:index + 2]
        except ValueError:
            pass

    # Check if the SDK that is used during compilation actually exists,
    # the universal build requires the usage of a universal SDK and not all
    # users have that installed by default.
    sysroot = None
    if '-isysroot' in cc_args:
        idx = cc_args.index('-isysroot')
        sysroot = cc_args[idx + 1]
    elif '-isysroot' in compiler_so:
        idx = compiler_so.index('-isysroot')
        sysroot = compiler_so[idx + 1]

    if sysroot and not os.path.isdir(sysroot):
        log.warn("Compiling with an SDK that doesn't seem to exist: %s",
                 sysroot)
        log.warn("Please check your Xcode installation")

    return compiler_so
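A minimal sketch of the effect, using made-up flag values (only meaningful on macOS, where os.uname() is available):

compiler_so = ['clang', '-arch', 'i386', '-arch', 'x86_64',
               '-isysroot', '/opt/SDKs/MacOSX10.6.sdk', '-O2']
cc_args = ['-arch', 'arm64', '-isysroot', '/opt/SDKs/MacOSX.sdk']

fixed = _darwin_compiler_fixup(compiler_so, cc_args)
# Because cc_args already carries '-arch' and '-isysroot', both are stripped
# from compiler_so, leaving roughly ['clang', '-O2']; a warning is logged if
# the requested sysroot directory does not actually exist.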
예제 #52
0
def run_or_warn(cmd):
    if os.system(cmd) == 0:
        return
    log.warn("command {!r} failed".format(cmd))
예제 #53
0
    python_modules = []
    cython_modules = []
else:
    # TODO: This should be quiet by default
    print("Discovering Python/Cython source code....")
    t = time.time()
    from sage_setup.optional_extension import is_package_installed_and_updated
    distributions = ['']
    optional_packages_with_extensions = [
        'mcqd', 'bliss', 'tdlib', 'coxeter3', 'fes', 'sirocco', 'meataxe'
    ]
    distributions += [
        'sagemath-{}'.format(pkg) for pkg in optional_packages_with_extensions
        if is_package_installed_and_updated(pkg)
    ]
    log.warn('distributions = {0}'.format(distributions))
    from sage_setup.find import find_python_sources
    python_packages, python_modules, cython_modules = find_python_sources(
        SAGE_SRC, ['sage'], distributions=distributions)

    log.debug('python_packages = {0}'.format(python_packages))
    print("Discovered Python/Cython sources, time: %.2f seconds." %
          (time.time() - t))

#########################################################
### Distutils
#########################################################

code = setup(packages=python_packages,
             cmdclass=cmdclass,
             ext_modules=cython_modules)
예제 #54
0
    def run(self):
        # Generate metadata first
        self.run_command("egg_info")

        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s" % self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        old_root = instcmd.root
        instcmd.root = None
        cmd = self.call_command('install_lib', warn_dir=0)
        instcmd.root = old_root

        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        for (p, ext_name) in enumerate(ext_outputs):
            filename, ext = os.path.splitext(ext_name)
            pyfile = os.path.join(self.bdist_dir,
                                  strip_module(filename) + '.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s" % ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            ext_outputs[p] = ext_name.replace(os.sep, '/')

        to_compile.extend(self.make_init_files())
        if to_compile:
            cmd.byte_compile(to_compile)

        if self.distribution.data_files:
            self.do_install_data()

        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        egg_info = os.path.join(archive_root, 'EGG-INFO')
        self.mkpath(egg_info)
        if self.distribution.scripts:
            script_dir = os.path.join(egg_info, 'scripts')
            log.info("installing scripts to %s" % script_dir)
            self.call_command('install_scripts',
                              install_dir=script_dir,
                              no_ep=1)

        native_libs = os.path.join(self.egg_info, "native_libs.txt")
        if all_outputs:
            log.info("writing %s" % native_libs)
            if not self.dry_run:
                libs_file = open(native_libs, 'wt')
                libs_file.write('\n'.join(all_outputs))
                libs_file.write('\n')
                libs_file.close()
        elif os.path.isfile(native_libs):
            log.info("removing %s" % native_libs)
            if not self.dry_run:
                os.unlink(native_libs)

        self.copy_metadata_to(egg_info)

        write_safety_flag(os.path.join(archive_root, 'EGG-INFO'),
                          self.zip_safe())

        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
            log.warn(
                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                "Use the install_requires/extras_require setup() args instead."
            )

        if self.exclude_source_files:
            self.zap_pyfiles()

        # Make the archive
        make_zipfile(self.egg_output,
                     archive_root,
                     verbose=self.verbose,
                     dry_run=self.dry_run,
                     mode=self.gen_header())
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_egg', get_python_version(), self.egg_output))
예제 #55
0
 def _add_defaults_data_files(self):
     try:
         super()._add_defaults_data_files()
     except TypeError:
         log.warn("data_files contains unexpected objects")
예제 #56
0
 def check_extension_availability(self, ext):
     cache = os.path.join(self.build_temp,
                          'check_%s.out' % ext.feature_name)
     if not self.force and os.path.isfile(cache):
         data = open(cache).read().strip()
         if data == '1':
             return True
         elif data == '0':
             return False
     mkpath(self.build_temp)
     src = os.path.join(self.build_temp, 'check_%s.c' % ext.feature_name)
     open(src, 'w').write(ext.feature_check)
     log.info("checking if %s is compilable" % ext.feature_name)
     try:
         [obj] = self.compiler.compile(
             [src],
             macros=ext.define_macros + [(undef, )
                                         for undef in ext.undef_macros],
             include_dirs=ext.include_dirs,
             extra_postargs=(ext.extra_compile_args or []),
             depends=ext.depends)
     except CompileError:
         log.warn("")
         log.warn("%s is not found or a compiler error: forcing --%s" %
                  (ext.feature_name, ext.neg_option_name))
         log.warn("(if %s is installed correctly, you may need to" %
                  ext.feature_name)
         log.warn(" specify the option --include-dirs or uncomment and")
         log.warn(" modify the parameter include_dirs in setup.cfg)")
         open(cache, 'w').write('0\n')
         return False
     prog = 'check_%s' % ext.feature_name
     log.info("checking if %s is linkable" % ext.feature_name)
     try:
         self.compiler.link_executable(
             [obj],
             prog,
             output_dir=self.build_temp,
             libraries=ext.libraries,
             library_dirs=ext.library_dirs,
             runtime_library_dirs=ext.runtime_library_dirs,
             extra_postargs=(ext.extra_link_args or []))
     except LinkError:
         log.warn("")
         log.warn("%s is not found or a linker error: forcing --%s" %
                  (ext.feature_name, ext.neg_option_name))
         log.warn("(if %s is installed correctly, you may need to" %
                  ext.feature_name)
         log.warn(" specify the option --library-dirs or uncomment and")
         log.warn(" modify the parameter library_dirs in setup.cfg)")
         open(cache, 'w').write('0\n')
         return False
     open(cache, 'w').write('1\n')
     return True
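The method relies on extension objects carrying extra attributes (feature_name, feature_check, neg_option_name); a minimal sketch of such an object, with all names, sources and libraries hypothetical:

from distutils.core import Extension

class FeatureExtension(Extension):
    # Extension plus the metadata check_extension_availability() expects.
    def __init__(self, name, sources, feature_name, feature_check,
                 neg_option_name, **kwargs):
        Extension.__init__(self, name, sources, **kwargs)
        self.feature_name = feature_name        # used in cache names and logs
        self.feature_check = feature_check      # tiny C program to try building
        self.neg_option_name = neg_option_name  # option that disables the feature

ext = FeatureExtension(
    "_speedups", ["ext/_speedups.c"],
    feature_name="libfoo",
    feature_check="#include <foo.h>\nint main(void) { return 0; }\n",
    neg_option_name="without-libfoo",
    libraries=["foo"])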
예제 #57
0
def read_embree_location():
    """

    Attempts to locate the embree installation. First, we check for an
    EMBREE_DIR environment variable. If one is not defined, we look for
    an embree.cfg file in the root yt source directory. Finally, if that
    is not present, we default to /usr/local. If embree is installed in a
    non-standard location and none of the above are set, the compile will
    not succeed. This only gets called if check_for_pyembree() returns
    something other than None.

    """

    rd = os.environ.get("EMBREE_DIR")
    if rd is None:
        try:
            rd = open("embree.cfg").read().strip()
        except IOError:
            rd = "/usr/local"

    fail_msg = (
        "I attempted to find Embree headers in %s. \n"
        "If this is not correct, please set the correct Embree location \n"
        "using the EMBREE_DIR environment variable or your embree.cfg file. \n"
        "Please see http://yt-project.org/docs/dev/visualizing/unstructured_mesh_rendering.html "
        "for more information. \n" % rd
    )

    # Create a temporary directory
    tmpdir = tempfile.mkdtemp()
    curdir = os.getcwd()

    try:
        os.chdir(tmpdir)

        # Get compiler invocation
        compiler = os.getenv("CXX", "c++")
        compiler = compiler.split(" ")

        # Attempt to compile a test script.
        filename = r"test.cpp"
        file = open(filename, "wt", 1)
        file.write('#include "embree2/rtcore.h"\n' "int main() {\n" "return 0;\n" "}")
        file.flush()
        p = Popen(
            compiler + ["-I%s/include/" % rd, filename],
            stdin=PIPE,
            stdout=PIPE,
            stderr=PIPE,
        )
        output, err = p.communicate()
        exit_code = p.returncode

        if exit_code != 0:
            log.warn(
                "Pyembree is installed, but I could not compile Embree test code."
            )
            log.warn("The error message was: ")
            log.warn(err)
            log.warn(fail_msg)

        # Clean up
        file.close()

    except OSError:
        log.warn(
            "read_embree_location() could not find your C compiler. "
            "Attempted to use '%s'.",
            compiler,
        )
        return False

    finally:
        os.chdir(curdir)
        shutil.rmtree(tmpdir)

    return rd
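A minimal sketch of how the returned prefix might be consumed downstream (extension and source names are hypothetical); note the function can also return False, which the truth test covers.

import os
from distutils.core import Extension

embree_prefix = read_embree_location()
if embree_prefix:
    mesh_ext = Extension(
        "mypkg.mesh_traversal",
        sources=["mypkg/mesh_traversal.pyx"],
        include_dirs=[os.path.join(embree_prefix, "include")],
        library_dirs=[os.path.join(embree_prefix, "lib")],
        libraries=["embree"])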
예제 #58
0
파일: setup.py 프로젝트: toxi22/cyclone
        cmd.write_file("top-level names", filename, '\n'.join(pkgs) + '\n')

    egg_info.write_toplevel_names = _hacked_write_toplevel_names


setuptools.setup(
    name="cyclone",
    version="1.2",
    author="fiorix",
    author_email="*****@*****.**",
    url="http://cyclone.io/",
    license="http://www.apache.org/licenses/LICENSE-2.0",
    description="Non-blocking web server for Python. "
                "Tornado API as a Twisted protocol.",
    keywords="python non-blocking web server twisted facebook tornado",
    packages=["cyclone", "twisted.plugins", "cyclone.tests", "cyclone.testing"],
    package_data={"twisted": ["plugins/cyclone_plugin.py"],
                  "cyclone": ["appskel_default.zip",
                              "appskel_foreman.zip",
                              "appskel_signup.zip"]},
    scripts=["scripts/cyclone"],
    **extra
)

try:
    from twisted.plugin import IPlugin, getPlugins
    list(getPlugins(IPlugin))
except Exception as e:
    log.warn("*** Failed to update Twisted plugin cache. ***")
    log.warn(str(e))
예제 #59
0
    def process_template_line(self, line):
        action, patterns, dir, dir_pattern = self._parse_template_line(line)
        if action == 'include':
            self.debug_print('include ' + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=1):
                    log.warn("warning: no files found matching '%s'", pattern)

        elif action == 'exclude':
            self.debug_print('exclude ' + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=1):
                    log.warn(
                        "warning: no previously-included files found matching '%s'",
                        pattern)

        elif action == 'global-include':
            self.debug_print('global-include ' + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=0):
                    log.warn(
                        "warning: no files found matching '%s' " +
                        'anywhere in distribution', pattern)

        elif action == 'global-exclude':
            self.debug_print('global-exclude ' + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=0):
                    log.warn(
                        "warning: no previously-included files matching '%s' found anywhere in distribution",
                        pattern)

        elif action == 'recursive-include':
            self.debug_print('recursive-include %s %s' %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.include_pattern(pattern, prefix=dir):
                    log.warn(
                        "warning: no files found matching '%s' " +
                        "under directory '%s'", pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print('recursive-exclude %s %s' %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    log.warn(
                        "warning: no previously-included files matching '%s' found under directory '%s'",
                        pattern, dir)

        elif action == 'graft':
            self.debug_print('graft ' + dir_pattern)
            if not self.include_pattern(None, prefix=dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)
        elif action == 'prune':
            self.debug_print('prune ' + dir_pattern)
            if not self.exclude_pattern(None, prefix=dir_pattern):
                log.warn(
                    'no previously-included directories found ' +
                    "matching '%s'", dir_pattern)
        else:
            raise DistutilsInternalError(
                "this cannot happen: invalid action '%s'" % action)
        return
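These actions map one-to-one onto MANIFEST.in lines; a minimal sketch of driving the method on a distutils FileList directly (the manifest lines here are hypothetical):

from distutils.filelist import FileList

filelist = FileList()
filelist.findall()   # collect the files on disk that the patterns match against
for line in ["include README.rst LICENSE",
             "recursive-include src *.c *.h",
             "global-exclude *.pyc",
             "graft docs",
             "prune build"]:
    filelist.process_template_line(line)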
예제 #60
0
def check_for_openmp():
    """Returns True if local setup supports OpenMP, False otherwise

    Code adapted from astropy_helpers, originally written by Tom
    Robitaille and Curtis McCully.
    """

    # Create a temporary directory
    ccompiler = new_compiler()
    customize_compiler(ccompiler)

    tmp_dir = tempfile.mkdtemp()
    start_dir = os.path.abspath(".")

    if os.name == "nt":
        # TODO: make this work with mingw
        # AFAICS there's no easy way to get the compiler distutils
        # will be using until compilation actually happens
        compile_flag = "-openmp"
        link_flag = ""
    else:
        compile_flag = "-fopenmp"
        link_flag = "-fopenmp"

    try:
        os.chdir(tmp_dir)

        with open("test_openmp.c", "w") as f:
            f.write(CCODE)

        os.mkdir("objects")

        # Compile, link, and run test program
        with stdchannel_redirected(sys.stderr, os.devnull):
            ccompiler.compile(
                ["test_openmp.c"], output_dir="objects", extra_postargs=[compile_flag]
            )
            ccompiler.link_executable(
                glob.glob(os.path.join("objects", "*")),
                "test_openmp",
                extra_postargs=[link_flag],
            )
            output = (
                subprocess.check_output("./test_openmp")
                .decode(sys.stdout.encoding or "utf-8")
                .splitlines()
            )

        if "nthreads=" in output[0]:
            nthreads = int(output[0].strip().split("=")[1])
            if len(output) == nthreads:
                using_openmp = True
            else:
                log.warn(
                    "Unexpected number of lines from output of test "
                    "OpenMP program (output was %s)",
                    output,
                )
                using_openmp = False
        else:
            log.warn(
                "Unexpected output from test OpenMP program (output was %s)", output
            )
            using_openmp = False

    except (CompileError, LinkError):
        using_openmp = False
    finally:
        os.chdir(start_dir)

    if using_openmp:
        log.warn("Using OpenMP to compile parallel extensions")
    else:
        log.warn(
            "Unable to compile OpenMP test program so Cython\n"
            "extensions will be compiled without parallel support"
        )

    return using_openmp
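A minimal sketch of acting on the result (extension name and sources are hypothetical); the flag mirrors the one probed above for non-Windows builds.

from distutils.core import Extension

omp_args = ["-fopenmp"] if check_for_openmp() else []

parallel_ext = Extension(
    "mypkg._parallel",
    sources=["mypkg/_parallel.c"],
    extra_compile_args=omp_args,
    extra_link_args=omp_args)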