Code Example #1
File: install.py Project: blink1073/flit
def get_dirs(user=True):
    """Get the 'scripts' and 'purelib' directories we'll install into.

    This is now a thin wrapper around sysconfig.get_paths(). It's not inlined,
    because some tests mock it out to install to a different location.
    """
    if user:
        if (sys.platform == "darwin") and sysconfig.get_config_var('PYTHONFRAMEWORK'):
            return sysconfig.get_paths('osx_framework_user')
        return sysconfig.get_paths(os.name + '_user')
    else:
        # The default scheme is 'posix_prefix' or 'nt', and should work for e.g.
        # installing into a virtualenv
        return sysconfig.get_paths()
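For reference, a minimal sketch (not part of the flit source) that prints the scheme names sysconfig knows about and the paths the default scheme resolves to; the keys are the same ones the examples below index into ('scripts', 'purelib', 'platlib', 'include', 'stdlib', 'data', ...):

import sysconfig

# Scheme names vary by platform and Python version, e.g. 'posix_prefix',
# 'posix_user', 'nt', 'nt_user', 'osx_framework_user'.
print(sysconfig.get_scheme_names())

# Paths for the default scheme of the running interpreter.
for key, path in sorted(sysconfig.get_paths().items()):
    print('%-12s %s' % (key, path))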
Code Example #2
File: bandit.py Project: SalemAmeen/bandit
def _find_config():
    # prefer config file in the following order:
    # 1) current directory, 2) user home directory, 3) bundled config
    config_dirs = (
        ['.'] + [appdirs.user_config_dir("bandit")] +
        appdirs.site_config_dir("bandit", multipath=True).split(':'))
    if _running_under_virtualenv():
        config_dirs.append(os.path.join(sys.prefix, 'etc', 'bandit'))
        config_dirs.append(
            os.path.join(sysconfig.get_paths().get('purelib', ''),
                         'bandit', 'config'))
    config_locations = [os.path.join(s, BASE_CONFIG) for s in config_dirs]

    # pip on Mac installs to the following path, but appdirs expects to
    # follow Mac's BPFileSystem spec which doesn't include this path so
    # we'll insert it. Issue raised as http://git.io/vOreU
    mac_pip_cfg_path = "/usr/local/etc/bandit/bandit.yaml"
    if mac_pip_cfg_path not in config_locations:
        config_locations.append(mac_pip_cfg_path)

    for config_file in config_locations:
        if os.path.isfile(config_file):
            return config_file  # Found a valid config
    else:
        # Failed to find any config, raise an error.
        raise utils.NoConfigFileFound(config_locations)
Code Example #3
 def test_get_paths(self):
     scheme = get_paths()
     default_scheme = _get_default_scheme()
     wanted = _expand_vars(default_scheme, None)
     wanted = sorted(wanted.items())
     scheme = sorted(scheme.items())
     self.assertEqual(scheme, wanted)
Code Example #4
    def __enter__(self):
        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)
        self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        # Note: prefer distutils' sysconfig to get the
        # library paths so PyPy is correctly supported.
        purelib = get_python_lib(plat_specific=0, prefix=self.path)
        platlib = get_python_lib(plat_specific=1, prefix=self.path)
        if purelib == platlib:
            lib_dirs = purelib
        else:
            lib_dirs = purelib + os.pathsep + platlib
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        os.environ['PYTHONNOUSERSITE'] = '1'

        return self.path
Code Example #5
File: envbuild.py Project: 2216288075/meiduo_project
    def __enter__(self):
        self.path = mkdtemp(prefix='pep517-build-env-')
        log.info('Temporary build environment: %s', self.path)

        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        if install_dirs['purelib'] == install_dirs['platlib']:
            lib_dirs = install_dirs['purelib']
        else:
            lib_dirs = install_dirs['purelib'] + os.pathsep + \
                install_dirs['platlib']
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        return self
Code Example #6
File: wheel.py Project: mwilliamson/pip
    def __enter__(self):
        self._temp_dir.create()

        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self._temp_dir.path,
            'platbase': self._temp_dir.path,
        })

        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        if install_dirs['purelib'] == install_dirs['platlib']:
            lib_dirs = install_dirs['purelib']
        else:
            lib_dirs = install_dirs['purelib'] + os.pathsep + \
                install_dirs['platlib']
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        return self._temp_dir.path
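Code Examples #4, #5 and #6 all implement the same isolated-build-environment pattern: override 'base' and 'platbase' so sysconfig.get_paths() resolves inside a temporary prefix, then prepend the resulting 'scripts' directory to PATH and the library directories to PYTHONPATH. A condensed, standalone sketch of that pattern (the prefix used here is only an illustration):

import os
import sysconfig

def isolated_env_dirs(prefix):
    """Return (scripts_dir, pythonpath_value) for an isolated prefix (sketch)."""
    scheme = 'nt' if os.name == 'nt' else 'posix_prefix'
    dirs = sysconfig.get_paths(scheme, vars={'base': prefix, 'platbase': prefix})
    if dirs['purelib'] == dirs['platlib']:
        lib_dirs = dirs['purelib']
    else:
        lib_dirs = dirs['purelib'] + os.pathsep + dirs['platlib']
    return dirs['scripts'], lib_dirs

scripts, pythonpath = isolated_env_dirs('/tmp/example-build-env')  # hypothetical path
print(scripts)
print(pythonpath)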
Code Example #7
def _write_ninja_file(path, name, sources, extra_cflags, extra_ldflags,
                      extra_include_paths):
    # Version 1.3 is required for the `deps` directive.
    config = ['ninja_required_version = 1.3']
    config.append('cxx = {}'.format(os.environ.get('CXX', 'c++')))

    # Turn into absolute paths so we can emit them into the ninja build
    # file wherever it is.
    sources = [os.path.abspath(file) for file in sources]
    includes = [os.path.abspath(file) for file in extra_include_paths]

    # include_paths() gives us the location of torch/torch.h
    includes += include_paths()
    # sysconfig.get_paths()['include'] gives us the location of Python.h
    includes.append(sysconfig.get_paths()['include'])

    cflags = ['-fPIC', '-std=c++11']
    cflags += ['-I{}'.format(include) for include in includes]
    cflags += extra_cflags
    flags = ['cflags = {}'.format(' '.join(cflags))]

    ldflags = ['-shared'] + extra_ldflags
    # The darwin linker needs explicit consent to ignore unresolved symbols
    if sys.platform == 'darwin':
        ldflags.append('-undefined dynamic_lookup')
    flags.append('ldflags = {}'.format(' '.join(ldflags)))

    # See https://ninja-build.org/build.ninja.html for reference.
    compile_rule = ['rule compile']
    compile_rule.append(
        '  command = $cxx -MMD -MF $out.d $cflags -c $in -o $out')
    compile_rule.append('  depfile = $out.d')
    compile_rule.append('  deps = gcc')
    compile_rule.append('')

    link_rule = ['rule link']
    link_rule.append('  command = $cxx $ldflags $in -o $out')

    # Emit one build rule per source to enable incremental build.
    object_files = []
    build = []
    for source_file in sources:
        # '/path/to/file.cpp' -> 'file'
        file_name = os.path.splitext(os.path.basename(source_file))[0]
        target = '{}.o'.format(file_name)
        object_files.append(target)
        build.append('build {}: compile {}'.format(target, source_file))

    library_target = '{}.so'.format(name)
    link = ['build {}: link {}'.format(library_target, ' '.join(object_files))]

    default = ['default {}'.format(library_target)]

    # 'Blocks' should be separated by newlines, for visual benefit.
    blocks = [config, flags, compile_rule, link_rule, build, link, default]
    with open(path, 'w') as build_file:
        for block in blocks:
            lines = '\n'.join(block)
            build_file.write('{}\n\n'.format(lines))
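The sysconfig.get_paths()['include'] lookup above is handy on its own whenever extension code has to find Python.h; a minimal sketch (the compiler invocation in the comment is only illustrative):

import sysconfig

# Directory containing Python.h for the running interpreter.
python_include = sysconfig.get_paths()['include']
print('-I' + python_include)
# e.g.: gcc -I<that directory> -c module.c -o module.o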
Code Example #8
File: isort.py Project: Elizaveta239/PyDev.Debugger
def get_stdlib_path():
    """Returns the path to the standard lib for the current path installation.

    This function can be dropped and "sysconfig.get_paths()" used directly once Python 2.6 support is dropped.
    """
    if sys.version_info >= (2, 7):
        import sysconfig
        return sysconfig.get_paths()['stdlib']
    else:
        return os.path.join(sys.prefix, 'lib')
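With Python 2.6 long out of support, the compatibility branch above collapses to a single lookup; a one-line sketch:

import sysconfig

print(sysconfig.get_paths()['stdlib'])  # e.g. /usr/lib/python3.x on many Linux installs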
Code Example #9
 def expand_categories(self, path_with_categories):
     local_vars = get_paths()
     local_vars['distribution.name'] = self.distribution.metadata['Name']
     expanded_path = format_value(path_with_categories, local_vars)
     expanded_path = format_value(expanded_path, local_vars)
     if '{' in expanded_path and '}' in expanded_path:
         logger.warning(
             '%s: unable to expand %s, some categories may be missing',
             self.get_command_name(), path_with_categories)
     return expanded_path
Code Example #10
File: meta.py Project: Kewpie007/clastic
 def get_context(self):
     ret = {}
     try:
         ret['sysconfig'] = sysconfig.get_config_vars()
     except:
         pass
     try:
         ret['paths'] = sysconfig.get_paths()
     except:
         pass
     return ret
Code Example #11
 def select_scheme(self, name):
     """Set the install directories by applying the install schemes."""
     # it's the caller's problem if they supply a bad name!
     scheme = get_paths(name, expand=False)
     for key, value in scheme.items():
         if key == 'platinclude':
             key = 'headers'
             value = os.path.join(value, self.distribution.metadata['Name'])
         attrname = 'install_' + key
         if hasattr(self, attrname):
             if getattr(self, attrname) is None:
                 setattr(self, attrname, value)
Code Example #12
File: build_env.py Project: mkurnikov/pip
 def __init__(self, path):
     # type: (str) -> None
     self.path = path
     self.setup = False
     self.bin_dir = get_paths(
         'nt' if os.name == 'nt' else 'posix_prefix',
         vars={'base': path, 'platbase': path}
     )['scripts']
     # Note: prefer distutils' sysconfig to get the
     # library paths so PyPy is correctly supported.
     purelib = get_python_lib(plat_specific=False, prefix=path)
     platlib = get_python_lib(plat_specific=True, prefix=path)
     if purelib == platlib:
         self.lib_dirs = [purelib]
     else:
         self.lib_dirs = [purelib, platlib]
Code Example #13
File: finders.py Project: timothycrosley/isort
    def __init__(self, config, sections):
        super(PathFinder, self).__init__(config, sections)

        # restore the original import path (i.e. not the path to bin/isort)
        self.paths = [os.getcwd()]

        # virtual env
        self.virtual_env = self.config.get('virtual_env') or os.environ.get('VIRTUAL_ENV')
        if self.virtual_env:
            self.virtual_env = os.path.realpath(self.virtual_env)
        self.virtual_env_src = False
        if self.virtual_env:
            self.virtual_env_src = '{0}/src/'.format(self.virtual_env)
            for path in glob('{0}/lib/python*/site-packages'.format(self.virtual_env)):
                if path not in self.paths:
                    self.paths.append(path)
            for path in glob('{0}/lib/python*/*/site-packages'.format(self.virtual_env)):
                if path not in self.paths:
                    self.paths.append(path)
            for path in glob('{0}/src/*'.format(self.virtual_env)):
                if os.path.isdir(path):
                    self.paths.append(path)

        # conda
        self.conda_env = self.config.get('conda_env') or os.environ.get('CONDA_PREFIX')
        if self.conda_env:
            self.conda_env = os.path.realpath(self.conda_env)
            for path in glob('{0}/lib/python*/site-packages'.format(self.conda_env)):
                if path not in self.paths:
                    self.paths.append(path)
            for path in glob('{0}/lib/python*/*/site-packages'.format(self.conda_env)):
                if path not in self.paths:
                    self.paths.append(path)

        # handle case-insensitive paths on windows
        self.stdlib_lib_prefix = os.path.normcase(sysconfig.get_paths()['stdlib'])
        if self.stdlib_lib_prefix not in self.paths:
            self.paths.append(self.stdlib_lib_prefix)

        # handle compiled libraries
        self.ext_suffix = sysconfig.get_config_var("EXT_SUFFIX") or ".so"

        # add system paths
        for path in sys.path[1:]:
            if path not in self.paths:
                self.paths.append(path)
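The 'stdlib' path and EXT_SUFFIX looked up above are what let the finder classify where an import actually lives on disk. A small standalone sketch (not isort's real logic) that uses the same sysconfig paths to decide whether a top-level module belongs to the standard library:

import importlib.util
import os
import sysconfig

def is_stdlib_module(name):
    """Rough check based on sysconfig paths; a sketch only."""
    spec = importlib.util.find_spec(name)
    if spec is None or spec.origin is None:
        return False                      # unknown module or namespace package
    if spec.origin in ('built-in', 'frozen'):
        return True                       # ships with the interpreter
    paths = sysconfig.get_paths()
    origin = os.path.normcase(os.path.abspath(spec.origin))
    in_stdlib = origin.startswith(os.path.normcase(paths['stdlib']))
    in_site = any(origin.startswith(os.path.normcase(paths[k]))
                  for k in ('purelib', 'platlib'))
    return in_stdlib and not in_site      # site-packages can sit under stdlib on some layouts

print(is_stdlib_module('json'))       # expected True
print(is_stdlib_module('sysconfig'))  # expected True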
Code Example #14
def CopyPythonLibs(dst, overwrite_lib, report=print):
    import sysconfig
    src = sysconfig.get_paths()['platstdlib']
    # Unix 'platstdlib' excludes 'lib', eg:
    #  '/usr/lib/python3.3' vs 'C:\blender\bin\2.58\python\Lib'
    # in both cases we have to end up with './2.58/python/lib'
    if sys.platform[:3] != "win":
        dst = os.path.join(dst, os.path.basename(src))

    if os.path.exists(src):
        write = False
        if os.path.exists(dst):
            if overwrite_lib:
                shutil.rmtree(dst)
                write = True
        else:
            write = True
        if write:
            shutil.copytree(src, dst, ignore=lambda dir, contents: [i for i in contents if i == '__pycache__'])
    else:
        report({'WARNING'}, "Python not found in %r, skipping python copy." % src)
Code Example #15
import pandas as pd
from datetime import datetime
import sysconfig
from ipywidgets import Image, HTML, Button, IntProgress, \
    Box, HBox, VBox, GridBox, Layout, ButtonStyle, Output
from IPython.display import display, clear_output

package_dir = sysconfig.get_paths()['purelib']
logo_path = package_dir + '/tortus/Images/tortus_logo.png'

try:
    with open(logo_path, 'rb') as image_file:
        image = image_file.read()

    logo = Image(value=image, format='png', width='100%')
    welcome = HTML("<h2 style='text-align:center'>\
        easy text annotation in a Jupyter Notebook</h2>")

except:
    logo = HTML("<h1 style='text-align:center'>t &nbsp; <span style=\
        'color:#36a849'>o</span> &nbsp; r &nbsp; t &nbsp; u &nbsp; s</h2>")
    welcome = HTML("<h3 style='text-align:center'>\
        easy text annotation in a Jupyter Notebook</h3>")

display(logo, welcome)


class Tortus:
    '''Text annotation within a Jupyter Notebook
    
    :attr annotation_index: A counter for the annotations in progress
Code Example #16
import sys
import os
from pathlib import Path
import sysconfig
import re

"""This tool makes available packages installed via apt to python installed in the original Docker image in /usr/local"""

stdlibDir = Path(sysconfig.get_paths()['platstdlib'])

cextNameRx = re.compile("^(?P<name>.+)\\.(?P<impl>cpython)-(?P<major>\\d)(?P<minor>\\d)m?-(?P<arch>x86_64)-(?P<abi>linux-gnu)\\.so$")

def genNewName(name, impl, major, minor, arch, abi):
    return name + "." + impl + "-" + str(major) + str(minor) + ("m" if (major, minor) < (3, 8) else "") + "-" + arch + "-" + abi + ".so"

def symlink(f, to):
    print(f, "->", to)
    os.symlink(f, to)

def genNewSoPath(parent, name):
    m = cextNameRx.match(name)
    if m:
        d = m.groupdict()
        d["major"] = sys.version_info[0]
        d["minor"] = sys.version_info[1]
        return parent / genNewName(**d)
    else:
        return parent / name

def fixSoPath(f):
    to = genNewSoPath(f.parent, f.name)
Code Example #17
        'binarization/cuda/functions_cuda_kernel.cu'
    ]
}

setuptools.setup(name="nncf",
                 version=find_version(os.path.join(here, "nncf/version.py")),
                 author="Intel",
                 author_email="*****@*****.**",
                 description="Neural Networks Compression Framework",
                 long_description=long_description,
                 long_description_content_type="text/markdown",
                 url="https://github.com/opencv/openvino-training-extensions",
                 packages=setuptools.find_packages(),
                 dependency_links=DEPENDENCY_LINKS,
                 classifiers=[
                     "Programming Language :: Python :: 3",
                     "License :: OSI Approved :: Apache Software License",
                     "Operating System :: OS Independent",
                 ],
                 install_requires=INSTALL_REQUIRES,
                 extras_require=EXTRAS_REQUIRE,
                 package_data=package_data,
                 keywords=KEY)

path_to_ninja = glob.glob(
    str(sysconfig.get_paths()["purelib"] + "/ninja*/ninja/data/bin/"))
if path_to_ninja:
    path_to_ninja = str(path_to_ninja[0] + "ninja")
    if not os.access(path_to_ninja, os.X_OK):
        os.chmod(path_to_ninja, 0o755)  # octal permission bits (rwxr-xr-x)
Code Example #18
File: sysconfig_get_paths.py Project: artivee/basc
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 Doug Hellmann.  All rights reserved.
#
"""The paths for a scheme.
"""
#end_pymotw_header

import sysconfig
import pprint
import os

for scheme in ['posix_prefix', 'posix_user']:
    print(scheme)
    print('=' * len(scheme))
    paths = sysconfig.get_paths(scheme=scheme)
    prefix = os.path.commonprefix(list(paths.values()))
    print('prefix = %s\n' % prefix)
    for name, path in sorted(paths.items()):
        print('%s\n  .%s' % (name, path[len(prefix):]))
    print()
Code Example #19
File: utils.py Project: cherry003/Cyberbrain
# Example:
# 0    a = true
# 1    a = true
# 2    b = {
# 3        'foo': 'bar'
# 4    }
# 5    c = false
#
# For the assignment of b, start_lineno = 2, end_lineno = 4
Surrounding = typing.NamedTuple("Surrounding", [("start_lineno", int),
                                                ("end_lineno", int)])

SourceLocation = typing.NamedTuple("SourceLocation", [("filepath", str),
                                                      ("lineno", int)])

installation_paths = list(sysconfig.get_paths().values())


class ID(str):
    """A class that represents an identifier.

    TODO: Create a hash function so that ID can be differentiated from string.
    """


@lru_cache()
def _on_laike9m_pc():
    return os.environ.get("MY_PC") == "true"


@lru_cache()
Code Example #20
import glob
import importlib
import inspect
import json
import pkgutil
import os
import sys
import sysconfig

pkg_dir = os.path.join(sysconfig.get_paths()["purelib"], "diagrams")
rsc_dir = os.path.join(sysconfig.get_paths()["purelib"], "resources")

providers_dirs = glob.glob(os.path.join(pkg_dir, "*"))
providers_dirs = list(filter(lambda d : (os.path.isdir(d) and not os.path.basename(d).startswith("__")), providers_dirs))

providers = list(map(lambda d: os.path.basename(d), providers_dirs))

for pkg_dir in providers_dirs:
    for (_, node_name, _) in pkgutil.iter_modules([pkg_dir]):
        provider = os.path.basename(pkg_dir)
        try:
            importlib.import_module("diagrams." + provider + "." + node_name, __package__)
        except: # TODO: handle exception
            pass

# [{"pkg":"diagrams.oci.storage", "node":"StorageGatewayWhite", "icon":"storage-gateway-white.png"}, ...]
json_list = []
for m in sys.modules.keys():
    if not m.startswith("diagrams"):
        continue  # skip modules that are not part of the diagrams package
    for n, c in inspect.getmembers(sys.modules[m], inspect.isclass):
Code Example #21
def RegisterPythonwin(register=True, lib_dir=None):
    """Add (or remove) Pythonwin to context menu for python scripts.
    ??? Should probably also add Edit command for pys files also.
    Also need to remove these keys on uninstall, but there's no function
        like file_created to add registry entries to uninstall log ???
    """
    import os

    if lib_dir is None:
        lib_dir = sysconfig.get_paths()["platlib"]
    classes_root = get_root_hkey()
    ## Installer executable doesn't seem to pass anything to postinstall script indicating if it's a debug build,
    pythonwin_exe = os.path.join(lib_dir, "Pythonwin", "Pythonwin.exe")
    pythonwin_edit_command = pythonwin_exe + ' -edit "%1"'

    keys_vals = [
        (
            "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\Pythonwin.exe",
            "",
            pythonwin_exe,
        ),
        (
            "Software\\Classes\\Python.File\\shell\\Edit with Pythonwin",
            "command",
            pythonwin_edit_command,
        ),
        (
            "Software\\Classes\\Python.NoConFile\\shell\\Edit with Pythonwin",
            "command",
            pythonwin_edit_command,
        ),
    ]

    try:
        if register:
            for key, sub_key, val in keys_vals:
                ## Since winreg only uses the character Api functions, this can fail if Python
                ##  is installed to a path containing non-ascii characters
                hkey = winreg.CreateKey(classes_root, key)
                if sub_key:
                    hkey = winreg.CreateKey(hkey, sub_key)
                winreg.SetValueEx(hkey, None, 0, winreg.REG_SZ, val)
                hkey.Close()
        else:
            for key, sub_key, val in keys_vals:
                try:
                    if sub_key:
                        hkey = winreg.OpenKey(classes_root, key)
                        winreg.DeleteKey(hkey, sub_key)
                        hkey.Close()
                    winreg.DeleteKey(classes_root, key)
                except OSError as why:
                    winerror = getattr(why, "winerror", why.errno)
                    if winerror != 2:  # file not found
                        raise
    finally:
        # tell windows about the change
        from win32com.shell import shell, shellcon

        shell.SHChangeNotify(shellcon.SHCNE_ASSOCCHANGED,
                             shellcon.SHCNF_IDLIST, None, None)
Code Example #22
 def is_library(self):
     return any(
         self.filename.startswith(path) for path in sysconfig.get_paths().values()
     )
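Code Examples #19 and #22 (and #29 below) share one idea: a frame is treated as library code when its filename lies under one of the interpreter's install paths. A tiny standalone sketch of the same check (the function name is made up for illustration):

import sysconfig

_INSTALL_PATHS = tuple(sysconfig.get_paths().values())

def looks_like_library_file(filename):
    """True if the file sits under any sysconfig install path (sketch)."""
    return filename.startswith(_INSTALL_PATHS)  # str.startswith accepts a tuple

print(looks_like_library_file(sysconfig.__file__))           # expected True
print(looks_like_library_file('/home/user/project/app.py'))  # expected False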
Code Example #23
    import jsonpickle.ext.numpy as jsonpickle_numpy

    # Make sure Numpy and Pandas objects can be correctly encoded.
    # https://github.com/jsonpickle/jsonpickle#numpy-support
    jsonpickle_numpy.register_handlers()
except ImportError:
    pass

try:
    import jsonpickle.ext.pandas as jsonpickle_pandas

    jsonpickle_pandas.register_handlers()
except ImportError:
    pass

_INSTALLATION_PATHS = list(sysconfig.get_paths().values())
_PYTHON_EXECUTABLE_PATH = sys.executable

jsonpickle.set_preferred_backend("ujson")


# To not let it show warnings
@cheap_repr.register_repr(argparse.Namespace)
def repr_for_namespace(_, __):
    return "argparse.Namespace"


def get_current_callable(frame: FrameType):
    """Returns the callable that generates the frame.

    See https://stackoverflow.com/a/52762678/2142577.
Code Example #24
    )
    parser.add_argument(
        "-silent",
        default=False,
        action="store_true",
        help='Don\'t display the "Abort/Retry/Ignore" dialog for files in use.',
    )
    parser.add_argument(
        "-quiet",
        default=False,
        action="store_true",
        help="Don't display progress messages.",
    )
    parser.add_argument(
        "-destination",
        default=sysconfig.get_paths()["platlib"],
        type=verify_destination,
        help="Location of the PyWin32 installation",
    )

    args = parser.parse_args()

    if not args.quiet:
        print("Parsed arguments are: {}".format(args))

    if not args.install ^ args.remove:
        parser.error("You need to either choose to -install or -remove!")

    if args.wait is not None:
        try:
            os.waitpid(args.wait, 0)
Code Example #25
$NetBSD: patch-src_pip___internal_build__env.py,v 1.4 2022/08/10 09:17:48 adam Exp $

Copy distutils distro config file to overlay.
https://github.com/pypa/pip/issues/10949

--- src/pip/_internal/build_env.py.orig	2022-08-03 18:55:14.000000000 +0000
+++ src/pip/_internal/build_env.py
@@ -7,6 +7,7 @@ import pathlib
 import sys
 import textwrap
 from collections import OrderedDict
+from shutil import copy
 from sysconfig import get_paths
 from types import TracebackType
 from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type
@@ -81,6 +82,9 @@ class BuildEnvironment:
         self._site_dir = os.path.join(temp_dir.path, "site")
         if not os.path.exists(self._site_dir):
             os.mkdir(self._site_dir)
+        distutils_distro_config = get_paths()["platlib"] + "/_distutils_system_mod.py"
+        if os.path.exists(distutils_distro_config):
+            copy(distutils_distro_config, self._site_dir)
         with open(
             os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
         ) as fp:
Code Example #26
def get_share_directory(
        app_name) -> Tuple[List[LogMessage], List[ExitMessage], Optional[str]]:
    # pylint: disable=too-many-return-statements
    """Return datadir to use for the ansible-launcher data files. First found wins.

    Example datadir: /usr/share/ansible_navigator

    :param app_name: Name of application - currently ansible_navigator
    :returns: Log messages and full datadir path
    """
    messages: List[LogMessage] = []
    exit_messages: List[ExitMessage] = []
    share_directory = None

    def debug_log(directory: str, found: bool, description: str):
        template = "Share directory '{directory}' {status} ({description})"
        formatted = template.format(
            directory=directory,
            status="found" if found else "not found",
            description=description,
        )
        msg = LogMessage(level=logging.DEBUG, message=formatted)
        messages.append(msg)

    # Development path
    # We want the share directory to resolve adjacent to the directory the code lives in
    # as that's the layout in the source.
    share_directory = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "..", "..", "..", "share",
                     app_name), )
    description = "development path"
    if os.path.exists(share_directory):
        debug_log(share_directory, True, description)
        return messages, exit_messages, share_directory
    debug_log(share_directory, False, description)

    # ~/.local/share/APP_NAME
    userbase = sysconfig.get_config_var("userbase")
    description = "userbase"
    if userbase is not None:
        share_directory = os.path.join(userbase, "share", app_name)
        if os.path.exists(share_directory):
            debug_log(share_directory, True, description)
            return messages, exit_messages, share_directory
    debug_log(share_directory, False, description)

    # /usr/share/APP_NAME  (or the venv equivalent)
    share_directory = os.path.join(sys.prefix, "share", app_name)
    description = "sys.prefix"
    if os.path.exists(share_directory):
        debug_log(share_directory, True, description)
        return messages, exit_messages, share_directory
    debug_log(share_directory, False, description)

    # /usr/share/APP_NAME  (or what was specified as the datarootdir when python was built)
    datarootdir = sysconfig.get_config_var("datarootdir")
    description = "datarootdir"
    if datarootdir is not None:
        share_directory = os.path.join(datarootdir, app_name)
        if os.path.exists(share_directory):
            debug_log(share_directory, True, description)
            return messages, exit_messages, share_directory
    debug_log(share_directory, False, description)

    # /Library/Python/x.y/share/APP_NAME  (common on macOS)
    datadir = sysconfig.get_paths().get("data")
    description = "datadir"
    if datadir is not None:
        share_directory = os.path.join(datadir, "share", app_name)
        if os.path.exists(share_directory):
            debug_log(share_directory, True, description)
            return messages, exit_messages, share_directory
    debug_log(share_directory, False, description)

    # /usr/local/share/APP_NAME
    prefix = sysconfig.get_config_var("prefix")
    description = "prefix"
    if prefix is not None:
        share_directory = os.path.join(prefix, "local", "share", app_name)
        if os.path.exists(share_directory):
            debug_log(share_directory, True, description)
            return messages, exit_messages, share_directory
    debug_log(share_directory, False, description)

    exit_msg = "Unable to find a viable share directory"
    exit_messages.append(ExitMessage(message=exit_msg))
    return messages, exit_messages, None
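The lookup order above boils down to probing a fixed list of candidate directories derived from sysconfig. A compact sketch of just that probing step (the application name is illustrative):

import os
import sys
import sysconfig

app_name = 'ansible_navigator'  # illustrative
candidates = []

userbase = sysconfig.get_config_var('userbase')
if userbase:
    candidates.append(os.path.join(userbase, 'share', app_name))

candidates.append(os.path.join(sys.prefix, 'share', app_name))

datarootdir = sysconfig.get_config_var('datarootdir')
if datarootdir:
    candidates.append(os.path.join(datarootdir, app_name))

datadir = sysconfig.get_paths().get('data')
if datadir:
    candidates.append(os.path.join(datadir, 'share', app_name))

prefix = sysconfig.get_config_var('prefix')
if prefix:
    candidates.append(os.path.join(prefix, 'local', 'share', app_name))

# First existing candidate wins, mirroring the function above.
share_dir = next((d for d in candidates if os.path.exists(d)), None)
print(share_dir)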
Code Example #27
            gst_flags = {
                'extra_link_args': [
                    '-F/Library/Frameworks', '-Xlinker', '-rpath', '-Xlinker',
                    '/Library/Frameworks', '-Xlinker', '-headerpad',
                    '-Xlinker', '190', '-framework', 'GStreamer'
                ],
                'include_dirs': [join(f_path, 'Headers')]
            }
    elif platform == 'win32':
        gst_flags = pkgconfig('gstreamer-1.0')
        if 'libraries' in gst_flags:
            print('GStreamer found via pkg-config')
            gstreamer_valid = True
            c_options['use_gstreamer'] = True
        else:
            _includes = get_isolated_env_paths()[0] + [get_paths()['include']]
            for include_dir in _includes:
                if exists(join(include_dir, 'gst', 'gst.h')):
                    print('GStreamer found via gst.h')
                    gstreamer_valid = True
                    c_options['use_gstreamer'] = True
                    gst_flags = {
                        'libraries':
                        ['gstreamer-1.0', 'glib-2.0', 'gobject-2.0']
                    }
                    break

    if not gstreamer_valid:
        # use pkg-config approach instead
        gst_flags = pkgconfig('gstreamer-1.0')
        if 'libraries' in gst_flags:
Code Example #28
File: cpp_extension.py Project: Jsmilemsj/pytorch
def _write_ninja_file(path,
                      name,
                      sources,
                      extra_cflags,
                      extra_cuda_cflags,
                      extra_ldflags,
                      extra_include_paths,
                      with_cuda=False):
    # Version 1.3 is required for the `deps` directive.
    config = ['ninja_required_version = 1.3']
    config.append('cxx = {}'.format(os.environ.get('CXX', 'c++')))
    if with_cuda:
        config.append('nvcc = {}'.format(_join_cuda_home('bin', 'nvcc')))

    # Turn into absolute paths so we can emit them into the ninja build
    # file wherever it is.
    sources = [os.path.abspath(file) for file in sources]
    includes = [os.path.abspath(file) for file in extra_include_paths]

    # include_paths() gives us the location of torch/torch.h
    includes += include_paths(with_cuda)
    # sysconfig.get_paths()['include'] gives us the location of Python.h
    includes.append(sysconfig.get_paths()['include'])

    common_cflags = ['-DTORCH_EXTENSION_NAME={}'.format(name)]
    common_cflags += ['-I{}'.format(include) for include in includes]

    cflags = common_cflags + ['-fPIC', '-std=c++11'] + extra_cflags
    flags = ['cflags = {}'.format(' '.join(cflags))]

    if with_cuda:
        cuda_flags = common_cflags
        cuda_flags += ['--compiler-options', "'-fPIC'"]
        cuda_flags += extra_cuda_cflags
        if not any(flag.startswith('-std=') for flag in cuda_flags):
            cuda_flags.append('-std=c++11')
        flags.append('cuda_flags = {}'.format(' '.join(cuda_flags)))

    ldflags = ['-shared'] + extra_ldflags
    # The darwin linker needs explicit consent to ignore unresolved symbols.
    if sys.platform == 'darwin':
        ldflags.append('-undefined dynamic_lookup')
    flags.append('ldflags = {}'.format(' '.join(ldflags)))

    # See https://ninja-build.org/build.ninja.html for reference.
    compile_rule = ['rule compile']
    compile_rule.append(
        '  command = $cxx -MMD -MF $out.d $cflags -c $in -o $out')
    compile_rule.append('  depfile = $out.d')
    compile_rule.append('  deps = gcc')

    if with_cuda:
        cuda_compile_rule = ['rule cuda_compile']
        cuda_compile_rule.append(
            '  command = $nvcc $cuda_flags -c $in -o $out')

    link_rule = ['rule link']
    link_rule.append('  command = $cxx $ldflags $in -o $out')

    # Emit one build rule per source to enable incremental build.
    object_files = []
    build = []
    for source_file in sources:
        # '/path/to/file.cpp' -> 'file'
        file_name = os.path.splitext(os.path.basename(source_file))[0]
        if _is_cuda_file(source_file):
            rule = 'cuda_compile'
            # Use a different object filename in case a C++ and CUDA file have
            # the same filename but different extension (.cpp vs. .cu).
            target = '{}.cuda.o'.format(file_name)
        else:
            rule = 'compile'
            target = '{}.o'.format(file_name)
        object_files.append(target)
        build.append('build {}: {} {}'.format(target, rule, source_file))

    library_target = '{}.so'.format(name)
    link = ['build {}: link {}'.format(library_target, ' '.join(object_files))]

    default = ['default {}'.format(library_target)]

    # 'Blocks' should be separated by newlines, for visual benefit.
    blocks = [config, flags, compile_rule]
    if with_cuda:
        blocks.append(cuda_compile_rule)
    blocks += [link_rule, build, link, default]
    with open(path, 'w') as build_file:
        for block in blocks:
            lines = '\n'.join(block)
            build_file.write('{}\n\n'.format(lines))
Code Example #29
File: tbtools.py Project: groth00/DATA228
 def is_library(self) -> bool:
     return any(
         self.filename.startswith(os.path.realpath(path))
         for path in sysconfig.get_paths().values())
Code Example #30
from distutils.command.install_scripts import install_scripts as d_install_scripts
from distutils.core import setup
from distutils.dist import DistributionMetadata

## GLOBAL VARS SETUP ##

_newdirsep = p_realpath('.')
_dirsep = ''
while _newdirsep != _dirsep: # iterate to find '/' or the system equivalent
    _dirsep = _newdirsep
    _newdirsep = p_dirname(_dirsep)
_dirsep = p_splitdrive(_dirsep)[1]
del _newdirsep
_projectpath = p_realpath('.')
_configvars = sysconfig.get_config_vars()
_configpaths = sysconfig.get_paths()
if p_basename(_configpaths['data']) == 'usr': #GOTCHA: '[path]/usr', not only '/usr', to allow for virtualenvs...
    _configprefix = p_normpath(p_join(_configpaths['data'], p_pardir, 'etc')) # "[path]/usr" => "[path]/etc" ("[path]/usr/etc", FHS-friendly)
else:
    _configprefix = p_join(_configpaths['data'], 'etc') # "[path]/[something_else]" => "[path]/[something_else]/etc"
_dirsep, _projectpath, _configprefix = unicode(_dirsep), unicode(_projectpath), unicode(_configprefix)

## TO EDIT ##

project = {
    'description': 'Async server micro-framework for control freaks',
    'hosttype': 'github',
    'repotype': 'git',
    'username': '******',
    'author': 'Rowan Thorpe',
    'author_email': '*****@*****.**',
Code Example #31
def _write_ninja_file(path,
                      name,
                      sources,
                      extra_cflags,
                      extra_cuda_cflags,
                      extra_ldflags,
                      extra_include_paths,
                      with_cuda=False):
    # Version 1.3 is required for the `deps` directive.
    config = ['ninja_required_version = 1.3']
    config.append('cxx = {}'.format(os.environ.get('CXX', 'c++')))
    if with_cuda:
        config.append('nvcc = {}'.format(_join_cuda_home('bin', 'nvcc')))

    # Turn into absolute paths so we can emit them into the ninja build
    # file wherever it is.
    sources = [os.path.abspath(file) for file in sources]
    includes = [os.path.abspath(file) for file in extra_include_paths]

    # include_paths() gives us the location of torch/torch.h
    includes += include_paths(with_cuda)
    # sysconfig.get_paths()['include'] gives us the location of Python.h
    includes.append(sysconfig.get_paths()['include'])

    common_cflags = ['-DTORCH_EXTENSION_NAME={}'.format(name)]
    common_cflags += ['-I{}'.format(include) for include in includes]

    cflags = common_cflags + ['-fPIC', '-std=c++11'] + extra_cflags
    if sys.platform == 'win32':
        from distutils.spawn import _nt_quote_args
        cflags = _nt_quote_args(cflags)
    flags = ['cflags = {}'.format(' '.join(cflags))]

    if with_cuda:
        cuda_flags = common_cflags
        if sys.platform == 'win32':
            cuda_flags = _nt_quote_args(cuda_flags)
        else:
            cuda_flags += ['--compiler-options', "'-fPIC'"]
            cuda_flags += extra_cuda_cflags
            if not any(flag.startswith('-std=') for flag in cuda_flags):
                cuda_flags.append('-std=c++11')

        flags.append('cuda_flags = {}'.format(' '.join(cuda_flags)))

    if sys.platform == 'win32':
        ldflags = ['/DLL'] + extra_ldflags
    else:
        ldflags = ['-shared'] + extra_ldflags
    # The darwin linker needs explicit consent to ignore unresolved symbols.
    if sys.platform == 'darwin':
        ldflags.append('-undefined dynamic_lookup')
    elif sys.platform == 'win32':
        ldflags = _nt_quote_args(ldflags)
    flags.append('ldflags = {}'.format(' '.join(ldflags)))

    # See https://ninja-build.org/build.ninja.html for reference.
    compile_rule = ['rule compile']
    if sys.platform == 'win32':
        compile_rule.append(
            '  command = cl /showIncludes $cflags -c $in /Fo$out')
        compile_rule.append('  deps = msvc')
    else:
        compile_rule.append(
            '  command = $cxx -MMD -MF $out.d $cflags -c $in -o $out')
        compile_rule.append('  depfile = $out.d')
        compile_rule.append('  deps = gcc')

    if with_cuda:
        cuda_compile_rule = ['rule cuda_compile']
        cuda_compile_rule.append(
            '  command = $nvcc $cuda_flags -c $in -o $out')

    link_rule = ['rule link']
    if sys.platform == 'win32':
        cl_paths = subprocess.check_output(['where',
                                            'cl']).decode().split('\r\n')
        if len(cl_paths) >= 1:
            cl_path = os.path.dirname(cl_paths[0]).replace(':', '$:')
        else:
            raise RuntimeError("MSVC is required to load C++ extensions")
        link_rule.append(
            '  command = "{}/link.exe" $in /nologo $ldflags /out:$out'.format(
                cl_path))
    else:
        link_rule.append('  command = $cxx $ldflags $in -o $out')

    # Emit one build rule per source to enable incremental build.
    object_files = []
    build = []
    for source_file in sources:
        # '/path/to/file.cpp' -> 'file'
        file_name = os.path.splitext(os.path.basename(source_file))[0]
        if _is_cuda_file(source_file):
            rule = 'cuda_compile'
            # Use a different object filename in case a C++ and CUDA file have
            # the same filename but different extension (.cpp vs. .cu).
            target = '{}.cuda.o'.format(file_name)
        else:
            rule = 'compile'
            target = '{}.o'.format(file_name)
        object_files.append(target)
        if sys.platform == 'win32':
            source_file = source_file.replace(':', '$:')
        build.append('build {}: {} {}'.format(target, rule, source_file))

    ext = '.pyd' if sys.platform == 'win32' else '.so'
    library_target = '{}{}'.format(name, ext)
    link = ['build {}: link {}'.format(library_target, ' '.join(object_files))]

    default = ['default {}'.format(library_target)]

    # 'Blocks' should be separated by newlines, for visual benefit.
    blocks = [config, flags, compile_rule]
    if with_cuda:
        blocks.append(cuda_compile_rule)
    blocks += [link_rule, build, link, default]
    with open(path, 'w') as build_file:
        for block in blocks:
            lines = '\n'.join(block)
            build_file.write('{}\n\n'.format(lines))
Code Example #32
File: setup.py Project: stkyle/winpcap
import cPickle, os, sys
import logging
import fnmatch
import platform
import sysconfig
from Cython.Build import cythonize

REQUIRES = ['dpkt', 'Cython', 'setuptools']
SOURCE_FILES = ['pcap.pyx', 'pcap_ex.c']
WIN_SDK_PATH = os.environ.get('WindowsSdkDir', None)
VCINSTALLDIR = os.environ.get('VCINSTALLDIR', None)


# Header Files
INC_WPCAP = r'C:\wpdpack\Include'
INC_PYTHON = sysconfig.get_paths().get('include', None)
INC_WINSDK = os.path.join(WIN_SDK_PATH,'Include') if WIN_SDK_PATH else None
INC_MSVC = os.path.join(VCINSTALLDIR, r'include') if VCINSTALLDIR else None

INCLUDE_PATHS = [INC_WPCAP, INC_PYTHON, INC_WINSDK, INC_MSVC]

# Libraries
LIB_WPACP = r'C:\wpdpack\Lib\x64'
LIB_PYTHON = r'C:\Anaconda3\envs\py2.7\libs'

LIBRARIES = ['wpcap', 'iphlpapi']
EXTRA_COMPILE_ARGS = [ '-DWIN32', '-DWPCAP' ,'-D_CRT_SECURE_NO_WARNINGS']

DEFINE_MACROS = []
#DEFINE_MACROS += [('HAVE_PCAP_INT_H', 0)]
DEFINE_MACROS += [('HAVE_PCAP_FILE', 1)]
Code Example #33
File: get_python_lib.py Project: vsbogd/ure
import sys
import sysconfig
import site

if __name__ == '__main__':
    # This is a hack due to the distutils in debian/ubuntu's python3 being misconfigured
    # see discussion https://github.com/opencog/atomspace/issues/1782
    #
    # If the bug is fixed, most of this script could be replaced by:
    #
    # from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific=True, prefix=prefix))
    #
    # However, using this would not respect Python virtual environments, so in a way this is better!
    
    prefix = sys.argv[1]

    # use sites if the prefix is recognized and the sites module is available
    # (virtualenv is missing getsitepackages())
    if hasattr(site, 'getsitepackages'):
        paths = [p for p in site.getsitepackages() if p.startswith(prefix)]
        if len(paths) == 1:
            print(paths[0])
            exit(0)
    
    # use sysconfig platlib as the fall back
    print(sysconfig.get_paths()['platlib'])
Code Example #34
File: mkcfg.py Project: pombredanne/vanity_app
        def setup(**attrs):
            """Mock the setup(**attrs) in order to retrive metadata."""
            # use the distutils v1 processings to correctly parse metadata.
            #XXX we could also use the setuptools distibution ???
            from distutils.dist import Distribution
            dist = Distribution(attrs)
            dist.parse_config_files()
            # 1. retrieves metadata that are quite similar PEP314<->PEP345
            labels = (('name',) * 2,
                      ('version',) * 2,
                      ('author',) * 2,
                      ('author_email',) * 2,
                      ('maintainer',) * 2,
                      ('maintainer_email',) * 2,
                      ('description', 'summary'),
                      ('long_description', 'description'),
                      ('url', 'home_page'),
                      ('platforms', 'platform'))

            if sys.version[:3] >= '2.5':
                labels += (('provides', 'provides-dist'),
                           ('obsoletes', 'obsoletes-dist'),
                           ('requires', 'requires-dist'),)
            get = lambda lab: getattr(dist.metadata, lab.replace('-', '_'))
            data.update((new, get(old)) for (old, new) in labels if get(old))
            # 2. retrieves data that requires special processing.
            data['classifier'].update(dist.get_classifiers() or [])
            data['scripts'].extend(dist.scripts or [])
            data['packages'].extend(dist.packages or [])
            data['modules'].extend(dist.py_modules or [])
            # 2.1 data_files -> resources.
            if dist.data_files:
                if len(dist.data_files) < 2 or \
                   isinstance(dist.data_files[1], str):
                    dist.data_files = [('', dist.data_files)]
                # add tokens in the destination paths
                vars = {'distribution.name': data['name']}
                path_tokens = sysconfig.get_paths(vars=vars).items()
                # sort tokens to use the longest one first
                # TODO chain two sorted with key arguments, remove cmp
                path_tokens.sort(cmp=lambda x, y: cmp(len(y), len(x)),
                                 key=lambda x: x[1])
                for dest, srcs in (dist.data_files or []):
                    dest = os.path.join(sys.prefix, dest)
                    for tok, path in path_tokens:
                        if dest.startswith(path):
                            dest = ('{%s}' % tok) + dest[len(path):]
                            files = [('/ '.join(src.rsplit('/', 1)), dest)
                                     for src in srcs]
                            data['resources'].extend(files)
                            continue
            # 2.2 package_data -> extra_files
            package_dirs = dist.package_dir or {}
            for package, extras in dist.package_data.iteritems() or []:
                package_dir = package_dirs.get(package, package)
                files = [os.path.join(package_dir, f) for f in extras]
                data['extra_files'].extend(files)

            # Use README file if its content is the description
            if "description" in data:
                ref = md5(re.sub('\s', '', self.data['description']).lower())
                ref = ref.digest()
                for readme in glob.glob('README*'):
                    fp = open(readme)
                    try:
                        contents = fp.read()
                    finally:
                        fp.close()
                    val = md5(re.sub('\s', '', contents.lower())).digest()
                    if val == ref:
                        del data['description']
                        data['description-file'] = readme
                        break
Code Example #35
File: scons_support.py Project: roshan2004/loos
def AutoConfiguration(env):
    global default_lib_path
    global conda_path

    conf = env.Configure(
        custom_tests={
            "CheckForSwig": CheckForSwig,
            "CheckBoostHeaders": CheckBoostHeaders,
            "CheckForBoostLibrary": CheckForBoostLibrary,
            "CheckBoostHeaderVersion": CheckBoostHeaderVersion,
            "CheckDirectory": CheckDirectory,
            "CheckAtlasRequires": CheckAtlasRequires,
            "CheckForIEC559": CheckForIEC559,
            "CheckSystemType": CheckSystemType,
            "CheckNumpy": CheckNumpy,
        })

    use_threads = int(env["threads"])

    # Get system information
    conf.CheckSystemType()

    conf.env["host_type"] = loos_build_config.host_type
    conf.env["linux_type"] = loos_build_config.linux_type

    if env.GetOption("clean") or env.GetOption("help"):
        env["HAS_NETCDF"] = 1
    else:
        has_netcdf = 0

        if env.USING_CONDA:
            conda_path = env["CONDA_PREFIX"]
            default_lib_path = conda_path + "/lib"
            if loos_build_config.host_type != "Darwin":
                conf.env.Append(RPATH=default_lib_path)
        else:
            default_lib_path = "/usr/lib"

            # if we're not in conda, add system library directory
            if not conf.CheckDirectory("/usr/lib64"):
                if not conf.CheckDirectory("/usr/lib"):
                    print(
                        "Fatal error- cannot find your system library directory"
                    )
                    conf.env.Exit(1)
            else:
                # /usr/lib64 is found, so make sure we link against this
                # (and not against any 32-bit libs)
                default_lib_path = "/usr/lib64"
        conf.env.Append(LIBPATH=default_lib_path)

        # Only setup ATLAS if we're not on a Mac and we're not using conda
        if loos_build_config.host_type != "Darwin" and not env.USING_CONDA:
            atlas_libpath = ""
            ATLAS_LIBPATH = env["ATLAS_LIBPATH"]
            ATLAS_LIBS = env["ATLAS_LIBS"]
            if not ATLAS_LIBPATH:
                # Some distros have atlas in /atlas-base, so must check that...
                if conf.CheckDirectory(default_lib_path + "/atlas-base"):
                    atlas_libpath = default_lib_path + "/atlas-base"
                elif conf.CheckDirectory(default_lib_path + "/atlas"):
                    atlas_libpath = default_lib_path + "/atlas"
                else:
                    print("Warning: Could not find an atlas directory! ")
            else:
                atlas_libpath = ATLAS_LIBPATH
                loos_build_config.user_libdirs["ATLAS"] = atlas_libpath

            if atlas_libpath:
                conf.env.Prepend(LIBPATH=[atlas_libpath])

        if not conf.CheckLib("pthread"):
            print("Error- LOOS requires a pthread library installed")

        # Now that we know the default library path, setup Boost, NetCDF, and
        # ATLAS based on the environment or custom.py file
        SetupBoostPaths(conf.env)
        SetupNetCDFPaths(conf.env)

        # Check for standard typedefs...
        if not conf.CheckType("ulong", "#include <sys/types.h>\n"):
            conf.env.Append(CCFLAGS="-DREQUIRES_ULONG")
        if not conf.CheckType("uint", "#include <sys/types.h>\n"):
            conf.env.Append(CCFLAGS="-DREQUIRES_UINT")

        # Check for floating point format...
        if not conf.CheckForIEC559():
            print(
                "Error- your system must use the IEC559/IEEE754 floating point"
            )
            print(
                "       format for Gromacs support in LOOS.  Check your compiler"
            )
            print("       options or contact the LOOS developers at")
            print("       [email protected]")
            conf.env.Exit(1)

        # --- NetCDF Autoconf
        has_netcdf = 0
        if conf.env["NETCDF_LIBS"]:
            netcdf_libs = env["NETCDF_LIBS"]
            conf.env.Append(CCFLAGS=["-DHAS_NETCDF"])
            has_netcdf = 1
        else:
            if conf.CheckLibWithHeader("netcdf", "netcdf.h",
                                       "c"):  # Should we check C or C++?
                netcdf_libs = "netcdf"
                conf.env.Append(CCFLAGS=["-DHAS_NETCDF"])
                has_netcdf = 1

        conf.env["HAS_NETCDF"] = has_netcdf

        # --- Swig Autoconf (unless user requested NO PyLOOS)
        if int(env["pyloos"]):
            if conf.CheckForSwig(loos_build_config.min_swig_version):
                conf.env["pyloos"] = 1
                pythonpath = get_paths()['include']
                if "PYTHON_INC" in conf.env:
                    if conf.env["PYTHON_INC"] != "":
                        pythonpath = conf.env["PYTHON_INC"]

                conf.env.Append(CPPPATH=[pythonpath])
                if not conf.CheckNumpy(pythonpath):
                    print("ERROR- PyLOOS build requires NumPy")
                    conf.env.Exit(1)
            else:
                conf.env["pyloos"] = 0

        # --- Boost Autoconf
        if not conf.CheckBoostHeaders():
            conf.env.Exit(1)

        if not conf.CheckBoostHeaderVersion(
                loos_build_config.min_boost_version):
            conf.env.Exit(1)

        if conf.env["BOOST_LIBS"]:
            boost_libs = env.Split(env["BOOST_LIBS"])
        if env.USING_CONDA:
            boost_libs = AutoConfigUserBoost(conf)
        elif not loos_build_config.user_boost_flag:
            boost_libs = AutoConfigSystemBoost(conf)
        else:
            boost_libs = AutoConfigUserBoost(conf)

        env.Append(LIBS=boost_libs)

        SetupEigen(conf)

        # --- Check for ATLAS/LAPACK and how to build

        if loos_build_config.host_type != "Darwin" and not env.USING_CONDA:
            atlas_libs = ""  # List of numerics libs required for LOOS

            if env["ATLAS_LIBS"]:
                atlas_libs = env.Split(env["ATLAS_LIBS"])
            else:

                numerics = {
                    "openblas": 0,
                    "satlas": 0,
                    "atlas": 0,
                    "lapack": 0,
                    "f77blas": 0,
                    "cblas": 0,
                    "blas": 0,
                }

                if use_threads:
                    numerics["tatlas"] = 0
                    numerics["ptcblas"] = 0
                    numerics["ptf77blas"] = 0

                for libname in numerics:
                    if conf.CheckLib(libname, autoadd=0):
                        numerics[libname] = 1

                atlas_libs = []
                atlas_name = ""

                has_gfortran = 0
                if conf.CheckLib("gfortran", autoadd=0):
                    has_gfortran = 1

                if use_threads and numerics["tatlas"]:
                    atlas_libs.append("tatlas")
                    atlas_name = "tatlas"
                elif numerics["satlas"]:
                    atlas_libs.append("satlas")
                    atlas_name = "satlas"
                else:

                    if numerics["lapack"]:
                        atlas_libs.append("lapack")

                    if use_threads and (numerics["ptf77blas"]
                                        and numerics["ptcblas"]):
                        atlas_libs.extend(["ptf77blas", "ptcblas"])
                    elif numerics["f77blas"] and numerics["cblas"]:
                        atlas_libs.extend(["f77blas", "cblas"])
                    elif numerics["blas"]:
                        atlas_libs.append("blas")
                    else:
                        print(
                            "Error- you must have some kind of blas installed")
                        conf.env.Exit(1)

                    if numerics["atlas"]:
                        atlas_libs.append("atlas")
                        atlas_name = "atlas"

                # Try to figure out how to build with ATLAS...
                # We need these functions, so find a combination of libs and
                # libpaths will work...
                for funcname in ("dgesvd_", "dgemm_", "dtrmm_", "dsyev_"):
                    (ok, requires_gfortran) = checkForFunction(
                        conf, funcname, atlas_libs, has_gfortran)
                    if requires_gfortran:
                        print("Build Requires gfortran")
                        atlas_libs.append("gfortran")

                    if not ok:
                        lib = checkLibsForFunction(conf, funcname,
                                                   list(numerics.keys()),
                                                   atlas_libs)
                        if lib:
                            atlas_libs.insert(0, lib)
                        else:
                            # Try scanning default_lib_path
                            # first... SUSE requires
                            # the lapack in /usr/lib first...
                            print(("Searching %s first for libraries..." %
                                   default_lib_path))
                            # Remove the default_lib_path from the list and
                            # prepend...
                            libpaths = list(conf.env["LIBPATH"])
                            libpaths.remove(default_lib_path)
                            libpaths.insert(0, default_lib_path)
                            conf.env["LIBPATH"] = libpaths
                            (ok, requires_gfortran) = checkForFunction(
                                conf, funcname, atlas_libs, has_gfortran)
                            if requires_gfortran:
                                print("Build requires gfortran")
                                atlas_libs.append("gfortran")

                            if not ok:
                                lib = checkLibsForFunction(
                                    conf, funcname, list(numerics.keys()),
                                    atlas_libs)
                                if lib:
                                    atlas_libs.insert(0, lib)
                                else:
                                    print(
                                        "Error- could not figure out where ",
                                        funcname,
                                        " is located.",
                                    )
                                    print(
                                        "Try manually specifying ATLAS_LIBS and ATLAS_LIBPATH"
                                    )
                                    conf.env.Exit(1)

            # Hack to extend list rather than append a list into a list
            for lib in atlas_libs:
                conf.env.Append(LIBS=lib)
        elif env.USING_CONDA:
            conf.env.Append(LIBS="openblas")

        # Suppress those annoying "maybe used uninitialized" warnings that
        # -Wall gives us...
        ccflags = conf.env["CCFLAGS"]
        conf.env.Append(CCFLAGS=["-Wno-maybe-uninitialized", "-Werror"
                                 ])  # Try suppressing, make bad flags an error
        ok = conf.TryCompile("", ".c")
        conf.env["CCFLAGS"] = ccflags
        if ok:
            conf.env.Append(CCFLAGS=["-Wno-maybe-uninitialized"])

        environOverride(conf)
        if "LIBS" in conf.env:
            print(
                "Autoconfigure will use these libraries to build LOOS:\n\t",
                conf.env["LIBS"],
            )
        if "LIBPATH" in conf.env:
            print(
                "Autoconfigure will add the following directories to find libs:\n\t",
                conf.env["LIBPATH"],
            )
        env = conf.Finish()
コード例 #36
0
def _create_desktop_shortcut_linux(frame=None):
    from os.path import expanduser
    from os import environ, chmod, chown
    import subprocess
    import pwd
    import sysconfig
    DEFAULT_LANGUAGE = environ.get('LANG', '').split(':')
    # TODO: Add more languages
    desktop = {
        "de": "Desktop",
        "en": "Desktop",
        "es": "Escritorio",
        "fi": r"Työpöytä",
        "fr": "Bureau",
        "it": "Scrivania",
        "pt": r"Área de Trabalho"
    }
    user = str(subprocess.check_output(['logname']).strip(), encoding='utf-8')
    try:
        ndesktop = desktop[DEFAULT_LANGUAGE[0][:2]]
        directory = join("/home", user, ndesktop)
        defaultdir = join("/home", user, "Desktop")
        if not exists(directory):
            if exists(defaultdir):
                directory = defaultdir
            else:
                if not option_q:
                    directory = _askdirectory(title="Locate Desktop Directory",
                                              initialdir=join(expanduser('~')),
                                              frame=frame)
                else:
                    directory = None
    except KeyError:
        if not option_q:
            directory = _askdirectory(title="Locate Desktop Directory",
                                      initialdir=join(expanduser('~')),
                                      frame=frame)
        else:
            directory = None
    if directory is None:
        sys.stderr.write("Desktop shortcut creation aborted!\n")
        return False
    try:
        link = join(directory, "RIDE.desktop")
    except UnicodeError:
        link = join(directory.encode('utf-8'), "RIDE.desktop")
    if not exists(link) or option_f:
        if not option_q and not option_f:
            if not _askyesno("Setup", "Create desktop shortcut?", frame):
                return False
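        # Look for the icon bundled with the installed robotide package
        # (under site-packages); fall back to the package __path__ below.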
        roboticon = join(sysconfig.get_paths()["purelib"], "robotide",
                         "widgets", "robot.ico")
        if not exists(roboticon):
            try:
                import robotide as _
                roboticon = join(_.__path__[0], "widgets", "robot.ico")
            except ImportError:
                pass
            if not exists(roboticon):
                roboticon = join(
                    "FIXME: find correct path to: .../site-packages/",
                    "robotide", "widgets", "robot.ico")
        with open(link, "w+") as shortcut:
            shortcut.write(f"#!/usr/bin/env xdg-open\n[Desktop Entry]\n"
                           f"Exec={sys.executable} -m robotide.__init__\n"
                           f"Comment=A Robot Framework IDE\nGenericName=RIDE\n"
                           f"Icon={roboticon}\n"
                           f"Name=RIDE\nStartupNotify=true\nTerminal=false\n"
                           "Type=Application\nX-KDE-SubstituteUID=false\n")
            uid = pwd.getpwnam(user).pw_uid
            chown(link, uid, -1)  # groupid == -1 means keep unchanged
            chmod(link, 0o744)
コード例 #37
0
ファイル: setup.py プロジェクト: yfyh2013/pycalphad
def symengine_h_get_include():
    if sys.platform == 'win32':
        # Strictly only valid for recent conda installations
        return os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(symengine.__file__)))), 'Library', 'include')
    else:
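        # Elsewhere, symengine.h is assumed to sit under the environment's
        # include/ prefix, i.e. the parent of Python's own include dir.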
        return os.path.dirname(get_paths()['include'])
コード例 #38
0
ファイル: setup.py プロジェクト: kivy/kivy
            gst_flags = {
                'extra_link_args': [
                    '-F/Library/Frameworks',
                    '-Xlinker', '-rpath',
                    '-Xlinker', '/Library/Frameworks',
                    '-Xlinker', '-headerpad',
                    '-Xlinker', '190',
                    '-framework', 'GStreamer'],
                'include_dirs': [join(f_path, 'Headers')]}
    elif platform == 'win32':
        gst_flags = pkgconfig('gstreamer-1.0')
        if 'libraries' in gst_flags:
            print('GStreamer found via pkg-config')
            gstreamer_valid = True
            c_options['use_gstreamer'] = True
        elif exists(join(get_paths()['include'], 'gst', 'gst.h')):
            print('GStreamer found via gst.h')
            gstreamer_valid = True
            c_options['use_gstreamer'] = True
            gst_flags = {
                'libraries': ['gstreamer-1.0', 'glib-2.0', 'gobject-2.0']}

    if not gstreamer_valid:
        # use pkg-config approach instead
        gst_flags = pkgconfig('gstreamer-1.0')
        if 'libraries' in gst_flags:
            print('GStreamer found via pkg-config')
            c_options['use_gstreamer'] = True


# detect SDL2, only on desktop and iOS, or android if explicitly enabled
コード例 #39
0
ファイル: sysconfig_get_paths.py プロジェクト: deweing/PyMOTW
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 Doug Hellmann.  All rights reserved.
#
"""The paths for a scheme.
"""
#end_pymotw_header

import sysconfig
import pprint

for scheme in ['posix_prefix', 'posix_user']:
    print(scheme)
    print('=' * len(scheme))
    pprint.pprint(sysconfig.get_paths(scheme=scheme))
    print()
コード例 #40
0
ファイル: cpp_extension.py プロジェクト: xiongyw/pytorch
def _write_ninja_file(path,
                      name,
                      sources,
                      extra_cflags,
                      extra_cuda_cflags,
                      extra_ldflags,
                      extra_include_paths,
                      with_cuda=False):
    extra_cflags = [flag.strip() for flag in extra_cflags]
    extra_cuda_cflags = [flag.strip() for flag in extra_cuda_cflags]
    extra_ldflags = [flag.strip() for flag in extra_ldflags]
    extra_include_paths = [flag.strip() for flag in extra_include_paths]

    # Version 1.3 is required for the `deps` directive.
    config = ['ninja_required_version = 1.3']
    config.append('cxx = {}'.format(os.environ.get('CXX', 'c++')))
    if with_cuda:
        config.append('nvcc = {}'.format(_join_cuda_home('bin', 'nvcc')))

    # Turn into absolute paths so we can emit them into the ninja build
    # file wherever it is.
    sources = [os.path.abspath(file) for file in sources]
    includes = [os.path.abspath(file) for file in extra_include_paths]

    # include_paths() gives us the location of torch/torch.h
    includes += include_paths(with_cuda)
    # sysconfig.get_paths()['include'] gives us the location of Python.h
    includes.append(sysconfig.get_paths()['include'])

    common_cflags = ['-DTORCH_EXTENSION_NAME={}'.format(name)]
    common_cflags += ['-I{}'.format(include) for include in includes]

    cflags = common_cflags + ['-fPIC', '-std=c++11'] + extra_cflags
    if sys.platform == 'win32':
        from distutils.spawn import _nt_quote_args
        cflags = _nt_quote_args(cflags)
    flags = ['cflags = {}'.format(' '.join(cflags))]

    if with_cuda:
        cuda_flags = common_cflags
        if sys.platform == 'win32':
            cuda_flags = _nt_quote_args(cuda_flags)
        else:
            cuda_flags += ['--compiler-options', "'-fPIC'"]
            cuda_flags += extra_cuda_cflags
            if not any(flag.startswith('-std=') for flag in cuda_flags):
                cuda_flags.append('-std=c++11')

        flags.append('cuda_flags = {}'.format(' '.join(cuda_flags)))

    if sys.platform == 'win32':
        ldflags = ['/DLL'] + extra_ldflags
    else:
        ldflags = ['-shared'] + extra_ldflags
    # The darwin linker needs explicit consent to ignore unresolved symbols.
    if sys.platform == 'darwin':
        ldflags.append('-undefined dynamic_lookup')
    elif sys.platform == 'win32':
        ldflags = _nt_quote_args(ldflags)
    flags.append('ldflags = {}'.format(' '.join(ldflags)))

    # See https://ninja-build.org/build.ninja.html for reference.
    compile_rule = ['rule compile']
    if sys.platform == 'win32':
        compile_rule.append(
            '  command = cl /showIncludes $cflags -c $in /Fo$out')
        compile_rule.append('  deps = msvc')
    else:
        compile_rule.append(
            '  command = $cxx -MMD -MF $out.d $cflags -c $in -o $out')
        compile_rule.append('  depfile = $out.d')
        compile_rule.append('  deps = gcc')

    if with_cuda:
        cuda_compile_rule = ['rule cuda_compile']
        cuda_compile_rule.append(
            '  command = $nvcc $cuda_flags -c $in -o $out')

    link_rule = ['rule link']
    if sys.platform == 'win32':
        cl_paths = subprocess.check_output(['where',
                                            'cl']).decode().split('\r\n')
        if len(cl_paths) >= 1:
            cl_path = os.path.dirname(cl_paths[0]).replace(':', '$:')
        else:
            raise RuntimeError("MSVC is required to load C++ extensions")
        link_rule.append(
            '  command = "{}/link.exe" $in /nologo $ldflags /out:$out'.format(
                cl_path))
    else:
        link_rule.append('  command = $cxx $ldflags $in -o $out')

    # Emit one build rule per source to enable incremental build.
    object_files = []
    build = []
    for source_file in sources:
        # '/path/to/file.cpp' -> 'file'
        file_name = os.path.splitext(os.path.basename(source_file))[0]
        if _is_cuda_file(source_file) and with_cuda:
            rule = 'cuda_compile'
            # Use a different object filename in case a C++ and CUDA file have
            # the same filename but different extension (.cpp vs. .cu).
            target = '{}.cuda.o'.format(file_name)
        else:
            rule = 'compile'
            target = '{}.o'.format(file_name)
        object_files.append(target)
        if sys.platform == 'win32':
            source_file = source_file.replace(':', '$:')
        build.append('build {}: {} {}'.format(target, rule, source_file))

    ext = '.pyd' if sys.platform == 'win32' else '.so'
    library_target = '{}{}'.format(name, ext)
    link = ['build {}: link {}'.format(library_target, ' '.join(object_files))]

    default = ['default {}'.format(library_target)]

    # 'Blocks' should be separated by newlines, for visual benefit.
    blocks = [config, flags, compile_rule]
    if with_cuda:
        blocks.append(cuda_compile_rule)
    blocks += [link_rule, build, link, default]
    with open(path, 'w') as build_file:
        for block in blocks:
            lines = '\n'.join(block)
            build_file.write('{}\n\n'.format(lines))
コード例 #41
0
def is_library(self):
    return any(
        self.filename.startswith(path)
        for path in sysconfig.get_paths().values())
コード例 #42
0
def get_include():
    info = get_paths()
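    # Drop the last component of the Python include dir (e.g.
    # '.../include/python3.x' -> '.../include'), which is presumably
    # where the SeqAn headers are installed.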
    seqan_path = '/'.join(info['include'].split('/')[:-1])
    # seqan_path += '/seqan'
    return seqan_path
コード例 #43
0
def install_package(install_name):
    try:
        xacc.info("Retrieving package and checking requirements..")
        package_path = PLUGIN_INSTALLATIONS[install_name]
        for k, v in MASTER_PACKAGES.items():
            if install_name in v and k in REQUIREMENTS:
                requirement = REQUIREMENTS[k]['module']
                mdir = k
                importlib.import_module(requirement)
    except KeyError as ex:
        xacc.info(
            "There is no '{}' XACC Python plugin package available.".format(
                install_name))
        exit(1)
    # this might have to change as more packages and their requirements get added
    # for now, it works fine, and should work fine for any XACC requirement
    # that needs to be git-cloned and built with cmake-make (vqe)
    except ModuleNotFoundError as ex:
        xacc.info(
            "You do not have the required Python module `{}` to install and run the '{}' XACC benchmark plugin package."
            .format(requirement, install_name))
        yn = input("Install requirements? (y/n) ")
        if yn == "y":
            dest = os.path.dirname(inspect.getfile(xacc))
            install_path = os.path.join(dest, REQUIREMENTS[mdir]['dir'])
            build_path = os.path.join(install_path, 'build')
            os.chdir(dest)
            subprocess.run([
                'git', 'clone', '--recursive',
                '{}'.format(REQUIREMENTS[mdir]['repo'])
            ])
            os.makedirs(build_path)
            os.chdir(build_path)
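            # Point CMake at this interpreter's Python.h via sysconfig.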
            subprocess.run([
                'cmake', '..', '-DXACC_DIR={}'.format(dest),
                '-DPYTHON_INCLUDE_DIR={}'.format(
                    sysconfig.get_paths()['include'])
            ])
            subprocess.run(['make', '-j2', 'install'])
        else:
            exit(1)

    install_directive = os.path.join(package_path +
                                     "/install.ini") if os.path.isfile(
                                         package_path +
                                         "/install.ini") else None
    plugin_files = []
    if not install_directive:
        plugin_files += [
            package_path + "/" + f for f in os.listdir(package_path) if
            os.path.isfile(os.path.join(package_path, f)) and f.endswith(".py")
        ]
    else:
        plugin_dict, plugin_list = read_install_directive(
            install_directive, package_path)
        for k, v in plugin_dict.items():
            mini_package_path = v
            plugin_files += [
                v + "/" + f for f in os.listdir(v)
                if os.path.isfile(os.path.join(v, f)) and f.endswith(".py")
            ]
    n_plugins = len(plugin_files)
    for plugin in plugin_files:
        copy(os.path.join(plugin), XACC_PYTHON_PLUGIN_PATH)

    xacc.info(
        "Installed {} plugins from the '{}' package to the {} directory.".
        format(n_plugins, install_name, XACC_PYTHON_PLUGIN_PATH))
コード例 #44
0
import sysconfig
import numpy as np
import os

#python include directory
dic = sysconfig.get_paths()
print('PYTHON_INC = ',dic['include'])

#binary modules suffix
suffix=sysconfig.get_config_var('SOABI')
print('SUFFIX = ',suffix+'.so')

inc= np.get_include()
print('NUMPY_INC = ',inc)

#os.system('f2py --help-link f2py_info | grep sources | sed -e "s/sources/F2PY_SRC/" | sed -e "s/\[//" | sed -e "s/\]//"')
#os.system('f2py --help-link f2py_info | grep include_dirs | sed -e "s/include_dirs/F2PY_INC/" | sed -e "s/\[//" | sed -e "s/\]//"')
コード例 #45
0
def getPythonLibFolder():
    from sysconfig import get_paths
    return os.path.join(get_paths()['data'], "lib")
コード例 #46
0
ファイル: create.py プロジェクト: MrMalina/Source.Python
        def setup_mock(**attrs):
            """Mock the setup(**attrs) in order to retrieve metadata."""

            # TODO use config and metadata instead of Distribution
            from distutils.dist import Distribution
            dist = Distribution(attrs)
            dist.parse_config_files()

            # 1. retrieve metadata fields that are quite similar in
            # PEP 314 and PEP 345
            labels = (('name',) * 2,
                      ('version',) * 2,
                      ('author',) * 2,
                      ('author_email',) * 2,
                      ('maintainer',) * 2,
                      ('maintainer_email',) * 2,
                      ('description', 'summary'),
                      ('long_description', 'description'),
                      ('url', 'home_page'),
                      ('platforms', 'platform'),
                      ('provides', 'provides-dist'),
                      ('obsoletes', 'obsoletes-dist'),
                      ('requires', 'requires-dist'))

            get = lambda lab: getattr(dist.metadata, lab.replace('-', '_'))
            data.update((new, get(old)) for old, new in labels if get(old))

            # 2. retrieve data that requires special processing
            data['classifier'].update(dist.get_classifiers() or [])
            data['scripts'].extend(dist.scripts or [])
            data['packages'].extend(dist.packages or [])
            data['modules'].extend(dist.py_modules or [])
            # 2.1 data_files -> resources
            if dist.data_files:
                if (len(dist.data_files) < 2 or
                    isinstance(dist.data_files[1], str)):
                    dist.data_files = [('', dist.data_files)]
                # add tokens in the destination paths
                vars = {'distribution.name': data['name']}
                path_tokens = sysconfig.get_paths(vars=vars).items()
                # sort tokens to use the longest one first
                path_tokens = sorted(path_tokens, key=lambda x: len(x[1]))
                for dest, srcs in (dist.data_files or []):
                    dest = os.path.join(sys.prefix, dest)
                    dest = dest.replace(os.path.sep, '/')
                    for tok, path in path_tokens:
                        path = path.replace(os.path.sep, '/')
                        if not dest.startswith(path):
                            continue

                        dest = ('{%s}' % tok) + dest[len(path):]
                        files = [('/ '.join(src.rsplit('/', 1)), dest)
                                 for src in srcs]
                        data['resources'].extend(files)

            # 2.2 package_data
            data['package_data'] = dist.package_data.copy()

            # Use README file if its content is the description
            if "description" in data:
                ref = md5(re.sub(r'\s', '',
                                 self.data['description']).lower().encode())
                ref = ref.digest()
                for readme in glob.glob('README*'):
                    with open(readme, encoding='utf-8') as fp:
                        contents = fp.read()
                    contents = re.sub(r'\s', '', contents.lower()).encode()
                    val = md5(contents).digest()
                    if val == ref:
                        del data['description']
                        data['description-file'] = readme
                        break
コード例 #47
0
import os
import time
from datetime import datetime
from playsound import playsound
import pyttsx3
import cv2 as cv
import sysconfig

# Set up audio files.
root_dir = os.path.abspath('.')
gunfire_path = os.path.join(root_dir, 'gunfire.wav')
tone_path = os.path.join(root_dir, 'tone.wav')

# Set up Haar cascades for face detection.
path = sysconfig.get_paths()['purelib'] + '/cv2/data/'
face_cascade = cv.CascadeClassifier(path + 'haarcascade_frontalface_alt.xml')
face2_cascade = cv.CascadeClassifier(path + 'haarcascade_frontalface_alt2.xml')
eye_cascade = cv.CascadeClassifier(path + 'haarcascade_eye.xml')
catface_cascade = cv.CascadeClassifier(path + 'haarcascade_frontalcatface.xml')
catface2_cascade = cv.CascadeClassifier(
    path + 'haarcascade_frontalcatface_extended.xml')

# Set up corridor images.
os.chdir('corridor_5')
contents = sorted(os.listdir())
# remove macOS Finder '.DS_Store' file
if '.DS_Store' in contents:
    contents.remove('.DS_Store')

# Detect faces and fire or disable gun.
for image in contents:
コード例 #49
0
import distutils.ccompiler
import distutils.command.clean
import glob
import inspect
import multiprocessing
import multiprocessing.pool
import os
import platform
import re
import shutil
import subprocess
import sys
from sysconfig import get_paths

import torch

pytorch_install_dir = os.path.dirname(os.path.abspath(torch.__file__))
base_dir = os.path.dirname(os.path.abspath(__file__))
python_include_dir = get_paths()['include']


def _check_env_flag(name, default=''):
    return os.getenv(name, default).upper() in ['ON', '1', 'YES', 'TRUE', 'Y']


def _get_env_backend():
    env_backend_var_name = 'IPEX_BACKEND'
    env_backend_options = ['cpu', 'gpu']
    env_backend_val = os.getenv(env_backend_var_name)
    if env_backend_val is None or env_backend_val.strip() == '':
        return 'cpu'
    else:
        if env_backend_val not in env_backend_options:
            print("Intel PyTorch Extension only supports CPU and GPU now.")
コード例 #50
0
import sys
import re
import shutil
import argparse
import platform
import subprocess
import sysconfig
from urllib.request import urlopen
from pathlib import Path

BUILD_DIR = Path("build").resolve()

TOOLS_VENV_DIR = BUILD_DIR / "tools_venv"
WHEELS_DIR = BUILD_DIR / "wheels"

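# sysconfig's "data" path is the base prefix of the running interpreter
# (e.g. the root of a virtualenv or the system installation).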
CPYTHON_DIR = Path(sysconfig.get_paths()["data"]).resolve()
CPYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
CPYTHON_ARCHSLUG = "win32" if platform.architecture(
)[0] == "32bit" else "amd64"
CPYTHON_DISTRIB_NAME = f"python-{CPYTHON_VERSION}-embed-{CPYTHON_ARCHSLUG}"
CPYTHON_DISTRIB_URL = (
    f"https://www.python.org/ftp/python/{CPYTHON_VERSION}/{CPYTHON_DISTRIB_NAME}.zip"
)
CPYTHON_DISTRIB_ARCHIVE = BUILD_DIR / f"{CPYTHON_DISTRIB_NAME}.zip"


def get_archslug():
    bits, _ = platform.architecture()
    return "win32" if bits == "32bit" else "win64"

コード例 #51
0
def read(rel_path):
    here = os.path.abspath(os.path.dirname(__file__))
    with codecs.open(os.path.join(here, rel_path), 'r') as fp:
        return fp.read()


def get_version(rel_path):
    for line in read(rel_path).splitlines():
        if line.startswith('__version__'):
            delim = '"' if '"' in line else "'"
            return line.split(delim)[1]
    else:
        raise RuntimeError("Unable to find version string.")


vinfo = sys.version_info
path_info = get_paths()

with open(README, "r") as fh:
    long_description = fh.read()

if IS_WINDOWS:
    arch = platform.architecture()[0][:2]
    boost_library = f'boost_python{vinfo.major}{vinfo.minor}*'
    boost_include = [BOOST_DIR]
    boost_lib = glob.glob(os.path.join(BOOST_DIR, f'lib{arch}-msvc-*'))
elif IS_LINUX:
    arch = platform.machine()
    syst = platform.system().lower()
    boost_library = f'boost_python{vinfo.major}{vinfo.minor}'
    boost_include = [os.path.join(BOOST_DIR, 'include')]
    boost_lib = [
コード例 #52
0
def _path_to_openwfom():
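    # Windows-style path: the 'openwfom' package folder inside the
    # interpreter's purelib (site-packages) directory.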
    return (sysconfig.get_paths()["purelib"] + "\\openwfom")
コード例 #53
0
#######################################################################################
#  In the name of God, the Compassionate, the Merciful
#  Pyabr (c) 2020 Mani Jamali. GNU General Public License v3.0
#
#  Official Website: 		http://pyabr.rf.gd
#  Programmer & Creator:    Mani Jamali <*****@*****.**>
#  Gap channel: 			@pyabr
#  Gap group:   			@pyabr_community
#  Git source:              github.com/PyFarsi/pyabr
#
#######################################################################################

import site, shutil, os, sys

#print(site.getusersitepackages()) # https://stackoverflow.com/questions/122327/how-do-i-find-the-location-of-my-python-site-packages-directory
import sysconfig
#print(sysconfig.get_paths()["purelib"]) #changed. works for virtual envs, too.

#s = site.getusersitepackages()
s = sysconfig.get_paths()["purelib"]
shutil.copyfile(s+r"\pyabr\pyabr.zip","pyabr.zip") # changed / to \
shutil.unpack_archive("pyabr.zip","pyabr-install","zip")
os.system("cd pyabr-install && \""+sys.executable+"\" setup.py")
shutil.rmtree("pyabr-install")
os.remove("pyabr.zip")
コード例 #54
0
def get_include_dir():
    """Returns the path to the Python environment's include dir."""
    return get_paths()['include']
コード例 #55
0
from sysconfig import get_paths
from pprint import pprint

info = get_paths()  # a dictionary of key-paths

# pretty print it for now
pprint(info)
# Example output:
# {'data': '/usr/local',
#  'include': '/usr/local/include/python2.7',
#  'platinclude': '/usr/local/include/python2.7',
#  'platlib': '/usr/local/lib/python2.7/dist-packages',
#  'platstdlib': '/usr/lib/python2.7',
#  'purelib': '/usr/local/lib/python2.7/dist-packages',
#  'scripts': '/usr/local/bin',
#  'stdlib': '/usr/lib/python2.7'}