Code Example #1
File: installer.py, Project: guanym98k/Python_-
            cmd.extend(('--index-url', index_url))
        if find_links is not None:
            for link in find_links:
                cmd.extend(('--find-links', link))
        # If requirement is a PEP 508 direct URL, directly pass
        # the URL to pip, as `req @ url` does not work on the
        # command line.
        if req.url:
            cmd.append(req.url)
        else:
            cmd.append(str(req))
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            raise DistutilsError(str(e)) from e
        wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
        dist_location = os.path.join(eggs_dir, wheel.egg_name())
        wheel.install_as_egg(dist_location)
        dist_metadata = pkg_resources.PathMetadata(
            dist_location, os.path.join(dist_location, 'EGG-INFO'))
        dist = pkg_resources.Distribution.from_filename(
            dist_location, metadata=dist_metadata)
        return dist


def strip_marker(req):
    """
Code Example #2
File: upload.py, Project: Snowgrass/stackless
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args, dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename, 'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protocol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename), content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': hashlib.md5(content).hexdigest(),

            # additional meta-data
            'metadata_version': '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename + ".asc", "rb").read())

        # set up the authentication
        user_pass = (self.username + ":" + self.password).encode('ascii')
        # The exact encoding of the authentication string is debated.
        # Anyway PyPI only accepts ascii for both username or password.
        auth = "Basic " + standard_b64encode(user_pass).decode('ascii')

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\r\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--\r\n'
        body = io.BytesIO()
        for key, value in data.items():
            title = '\r\nContent-Disposition: form-data; name="%s"' % key
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    title += '; filename="%s"' % value[0]
                    value = value[1]
                else:
                    value = str(value).encode('utf-8')
                body.write(sep_boundary)
                body.write(title.encode('utf-8'))
                body.write(b"\r\n\r\n")
                body.write(value)
        body.write(end_boundary)
        body = body.getvalue()

        msg = "Submitting %s to %s" % (filename, self.repository)
        self.announce(msg, log.INFO)

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': str(len(body)),
            'Authorization': auth,
        }

        request = Request(self.repository, data=body, headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
        except HTTPError as e:
            status = e.code
            reason = e.msg
        except OSError as e:
            self.announce(str(e), log.ERROR)
            raise

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
            if self.show_response:
                text = self._read_pypi_response(result)
                msg = '\n'.join(('-' * 75, text, '-' * 75))
                self.announce(msg, log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (status, reason)
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)
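
The body-building loop above is the whole multipart/form-data protocol in miniature: each field becomes a boundary line, a Content-Disposition header, a blank line, and the raw value, with a trailing `--` closing the payload. A self-contained distillation of that loop (illustrative helper, not part of the original module):

import io

def encode_multipart(fields, boundary):
    """Encode a dict of form fields the same way upload_file does above."""
    sep = b'\r\n--' + boundary.encode('ascii')
    body = io.BytesIO()
    for name, value in fields.items():
        disposition = '\r\nContent-Disposition: form-data; name="%s"' % name
        if isinstance(value, tuple):      # (filename, bytes) marks a file upload
            disposition += '; filename="%s"' % value[0]
            value = value[1]
        elif not isinstance(value, bytes):
            value = str(value).encode('utf-8')
        body.write(sep)
        body.write(disposition.encode('utf-8'))
        body.write(b'\r\n\r\n')
        body.write(value)
    body.write(sep + b'--\r\n')           # terminating boundary
    return body.getvalue()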
Code Example #3
File: setup.py, Project: ameihm0912/beam
 def run(self):
     import subprocess
     args = ['mypy', self.get_project_path()]
     result = subprocess.call(args)
     if result != 0:
         raise DistutilsError("mypy exited with status %d" % result)
Code Example #4
    def build_a_library(self, build_info, lib_name, libraries):
        # default compilers
        compiler = self.compiler
        fcompiler = self._f_compiler

        sources = build_info.get('sources')
        if sources is None or not is_sequence(sources):
            raise DistutilsSetupError(
                ("in 'libraries' option (library '%s'), " +
                 "'sources' must be present and must be " +
                 "a list of source filenames") % lib_name)
        sources = list(sources)

        c_sources, cxx_sources, f_sources, fmodule_sources \
            = filter_sources(sources)
        requiref90 = not not fmodule_sources or \
            build_info.get('language', 'c') == 'f90'

        # save source type information so that build_ext can use it.
        source_languages = []
        if c_sources:
            source_languages.append('c')
        if cxx_sources:
            source_languages.append('c++')
        if requiref90:
            source_languages.append('f90')
        elif f_sources:
            source_languages.append('f77')
        build_info['source_languages'] = source_languages

        lib_file = compiler.library_filename(lib_name,
                                             output_dir=self.build_clib)
        depends = sources + build_info.get('depends', [])
        if not (self.force or newer_group(depends, lib_file, 'newer')):
            log.debug("skipping '%s' library (up-to-date)", lib_name)
            return
        else:
            log.info("building '%s' library", lib_name)

        config_fc = build_info.get('config_fc', {})
        if fcompiler is not None and config_fc:
            log.info('using additional config_fc from setup script '
                     'for fortran compiler: %s' % (config_fc, ))
            from numpy.distutils.fcompiler import new_fcompiler
            fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
                                      verbose=self.verbose,
                                      dry_run=self.dry_run,
                                      force=self.force,
                                      requiref90=requiref90,
                                      c_compiler=self.compiler)
            if fcompiler is not None:
                dist = self.distribution
                base_config_fc = dist.get_option_dict('config_fc').copy()
                base_config_fc.update(config_fc)
                fcompiler.customize(base_config_fc)

        # check availability of Fortran compilers
        if (f_sources or fmodule_sources) and fcompiler is None:
            raise DistutilsError("library %s has Fortran sources"
                                 " but no Fortran compiler found" % (lib_name))

        if fcompiler is not None:
            fcompiler.extra_f77_compile_args = build_info.get(
                'extra_f77_compile_args') or []
            fcompiler.extra_f90_compile_args = build_info.get(
                'extra_f90_compile_args') or []

        macros = build_info.get('macros')
        include_dirs = build_info.get('include_dirs')
        if include_dirs is None:
            include_dirs = []
        extra_postargs = build_info.get('extra_compiler_args') or []

        include_dirs.extend(get_numpy_include_dirs())
        # where compiled F90 module files are:
        module_dirs = build_info.get('module_dirs') or []
        module_build_dir = os.path.dirname(lib_file)
        if requiref90:
            self.mkpath(module_build_dir)

        if compiler.compiler_type == 'msvc':
            # this hack works around the msvc compiler attributes
            # problem, msvc uses its own convention :(
            c_sources += cxx_sources
            cxx_sources = []

        objects = []
        if c_sources:
            log.info("compiling C sources")
            objects = compiler.compile(c_sources,
                                       output_dir=self.build_temp,
                                       macros=macros,
                                       include_dirs=include_dirs,
                                       debug=self.debug,
                                       extra_postargs=extra_postargs)

        if cxx_sources:
            log.info("compiling C++ sources")
            cxx_compiler = compiler.cxx_compiler()
            cxx_objects = cxx_compiler.compile(cxx_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)
            objects.extend(cxx_objects)

        if f_sources or fmodule_sources:
            extra_postargs = []
            f_objects = []

            if requiref90:
                if fcompiler.module_dir_switch is None:
                    existing_modules = glob('*.mod')
                extra_postargs += fcompiler.module_options(
                    module_dirs, module_build_dir)

            if fmodule_sources:
                log.info("compiling Fortran 90 module sources")
                f_objects += fcompiler.compile(fmodule_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)

            if requiref90 and self._f_compiler.module_dir_switch is None:
                # move new compiled F90 module files to module_build_dir
                for f in glob('*.mod'):
                    if f in existing_modules:
                        continue
                    t = os.path.join(module_build_dir, f)
                    if os.path.abspath(f) == os.path.abspath(t):
                        continue
                    if os.path.isfile(t):
                        os.remove(t)
                    try:
                        self.move_file(f, module_build_dir)
                    except DistutilsFileError:
                        log.warn('failed to move %r to %r' %
                                 (f, module_build_dir))

            if f_sources:
                log.info("compiling Fortran sources")
                f_objects += fcompiler.compile(f_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)
        else:
            f_objects = []

        if f_objects and not fcompiler.can_ccompiler_link(compiler):
            # Default linker cannot link Fortran object files, and results
            # need to be wrapped later. Instead of creating a real file
            # library, just keep track of the object files.
            listfn = os.path.join(self.build_clib, lib_name + '.fobjects')
            with open(listfn, 'w') as f:
                f.write("\n".join(os.path.abspath(obj) for obj in f_objects))

            listfn = os.path.join(self.build_clib, lib_name + '.cobjects')
            with open(listfn, 'w') as f:
                f.write("\n".join(os.path.abspath(obj) for obj in objects))

            # create empty "library" file for dependency tracking
            lib_fname = os.path.join(self.build_clib,
                                     lib_name + compiler.static_lib_extension)
            with open(lib_fname, 'wb') as f:
                pass
        else:
            # assume that default linker is suitable for
            # linking Fortran object files
            objects.extend(f_objects)
            compiler.create_static_lib(objects,
                                       lib_name,
                                       output_dir=self.build_clib,
                                       debug=self.debug)

        # fix library dependencies
        clib_libraries = build_info.get('libraries', [])
        for lname, binfo in libraries:
            if lname in clib_libraries:
                clib_libraries.extend(binfo.get('libraries', []))
        if clib_libraries:
            build_info['libraries'] = clib_libraries
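
build_a_library is driven by entries of the distribution's `libraries` option; each entry is a (name, build_info) pair. A hypothetical entry showing the keys the method reads (names and paths are illustrative):

libraries = [
    ('examplemath', {
        'sources': ['src/core.c', 'src/solver.f90'],   # required, must be a sequence
        'depends': ['src/core.h'],
        'macros': [('EXAMPLE_DEBUG', '1')],
        'include_dirs': ['include'],
        'language': 'f90',              # forces the requiref90 path above
        'extra_compiler_args': ['-O3'],
        'extra_f90_compile_args': [],
        'config_fc': {},                # per-library Fortran compiler overrides
        'module_dirs': [],              # where precompiled .mod files live
    }),
]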
Code Example #5
File: installer.py, Project: wolkolak/EZ_machining
def fetch_build_egg(dist, req):
    """Fetch an egg needed for building.

    Use pip/wheel to fetch/build a wheel."""
    # Check pip is available.
    try:
        pkg_resources.get_distribution('pip')
    except pkg_resources.DistributionNotFound:
        dist.announce(
            'WARNING: The pip package is not available, falling back '
            'to EasyInstall for handling setup_requires/test_requires; '
            'this is deprecated and will be removed in a future version.',
            log.WARN
        )
        return _legacy_fetch_build_egg(dist, req)
    # Warn if wheel is not.
    try:
        pkg_resources.get_distribution('wheel')
    except pkg_resources.DistributionNotFound:
        dist.announce('WARNING: The wheel package is not available.', log.WARN)
    # Ignore environment markers; if supplied, it is required.
    req = strip_marker(req)
    # Take easy_install options into account, but do not override relevant
    # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
    # take precedence.
    opts = dist.get_option_dict('easy_install')
    if 'allow_hosts' in opts:
        raise DistutilsError('the `allow-hosts` option is not supported '
                             'when using pip to install requirements.')
    if 'PIP_QUIET' in os.environ or 'PIP_VERBOSE' in os.environ:
        quiet = False
    else:
        quiet = True
    if 'PIP_INDEX_URL' in os.environ:
        index_url = None
    elif 'index_url' in opts:
        index_url = opts['index_url'][1]
    else:
        index_url = None
    if 'find_links' in opts:
        find_links = _fixup_find_links(opts['find_links'][1])[:]
    else:
        find_links = []
    if dist.dependency_links:
        find_links.extend(dist.dependency_links)
    eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
    environment = pkg_resources.Environment()
    for egg_dist in pkg_resources.find_distributions(eggs_dir):
        if egg_dist in req and environment.can_add(egg_dist):
            return egg_dist
    with TemporaryDirectory() as tmpdir:
        cmd = [
            sys.executable, '-m', 'pip',
            '--disable-pip-version-check',
            'wheel', '--no-deps',
            '-w', tmpdir,
        ]
        if quiet:
            cmd.append('--quiet')
        if index_url is not None:
            cmd.extend(('--index-url', index_url))
        if find_links is not None:
            for link in find_links:
                cmd.extend(('--find-links', link))
        # If requirement is a PEP 508 direct URL, directly pass
        # the URL to pip, as `req @ url` does not work on the
        # command line.
        if req.url:
            cmd.append(req.url)
        else:
            cmd.append(str(req))
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            raise DistutilsError(str(e)) from e
        wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
        dist_location = os.path.join(eggs_dir, wheel.egg_name())
        wheel.install_as_egg(dist_location)
        dist_metadata = pkg_resources.PathMetadata(
            dist_location, os.path.join(dist_location, 'EGG-INFO'))
        dist = pkg_resources.Distribution.from_filename(
            dist_location, metadata=dist_metadata)
        return dist
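
In setuptools this function is called once per unsatisfied setup-time requirement; a sketch of the calling pattern (not the verbatim caller):

def fetch_build_eggs(dist, requires):
    """Resolve each setup_requires entry to a locally built egg."""
    return [
        fetch_build_egg(dist, req)
        for req in pkg_resources.parse_requirements(requires)
    ]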
Code Example #6
def fully_define_extension(build_ext):
    check_tf_version()

    tf_compile_flags, tf_link_flags = get_tf_flags(build_ext)
    mpi_flags = get_mpi_flags()

    gpu_allreduce = os.environ.get('HOROVOD_GPU_ALLREDUCE')
    if gpu_allreduce and gpu_allreduce != 'MPI' and gpu_allreduce != 'NCCL':
        raise DistutilsError('HOROVOD_GPU_ALLREDUCE=%s is invalid, supported '
                             'values are "", "MPI", "NCCL".' % gpu_allreduce)

    gpu_allgather = os.environ.get('HOROVOD_GPU_ALLGATHER')
    if gpu_allgather and gpu_allgather != 'MPI':
        raise DistutilsError('HOROVOD_GPU_ALLGATHER=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_allgather)

    gpu_broadcast = os.environ.get('HOROVOD_GPU_BROADCAST')
    if gpu_broadcast and gpu_broadcast != 'MPI':
        raise DistutilsError('HOROVOD_GPU_BROADCAST=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_broadcast)

    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []

    if gpu_allreduce == 'NCCL':
        have_nccl = True
        nccl_include_dirs, nccl_lib_dirs = get_nccl_dirs(
            build_ext, cuda_include_dirs, cuda_lib_dirs)
    else:
        have_nccl = False
        nccl_include_dirs = nccl_lib_dirs = []

    MACROS = []
    INCLUDES = []
    SOURCES = [
        'horovod/tensorflow/mpi_message.cc', 'horovod/tensorflow/mpi_ops.cc',
        'horovod/tensorflow/timeline.cc'
    ]
    COMPILE_FLAGS = ['-std=c++11', '-fPIC', '-O2'
                     ] + shlex.split(mpi_flags) + tf_compile_flags
    LINK_FLAGS = shlex.split(mpi_flags) + tf_link_flags
    LIBRARY_DIRS = []
    LIBRARIES = []

    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        LIBRARY_DIRS += cuda_lib_dirs
        LIBRARIES += ['cudart']

    if have_nccl:
        MACROS += [('HAVE_NCCL', '1')]
        INCLUDES += nccl_include_dirs
        LIBRARY_DIRS += nccl_lib_dirs
        LIBRARIES += ['nccl']

    if gpu_allreduce:
        MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]

    if gpu_allgather:
        MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]

    if gpu_broadcast:
        MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]

    tensorflow_mpi_lib.define_macros = MACROS
    tensorflow_mpi_lib.include_dirs = INCLUDES
    tensorflow_mpi_lib.sources = SOURCES
    tensorflow_mpi_lib.extra_compile_args = COMPILE_FLAGS
    tensorflow_mpi_lib.extra_link_args = LINK_FLAGS
    tensorflow_mpi_lib.library_dirs = LIBRARY_DIRS
    tensorflow_mpi_lib.libraries = LIBRARIES
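
fully_define_extension mutates a module-level Extension (tensorflow_mpi_lib) rather than returning one, so it has to run after the compiler is configured but before compilation starts. A hedged sketch of how a custom build_ext might invoke it:

from setuptools.command.build_ext import build_ext as _build_ext

class custom_build_ext(_build_ext):
    def build_extensions(self):
        # Compile/link flags can only be probed once build_ext is set up,
        # hence the late binding here instead of at setup() time.
        fully_define_extension(self)
        _build_ext.build_extensions(self)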
Code Example #7
def fetch_build_egg(dist, req):  # noqa: C901  # is too complex (16)  # FIXME
    """Fetch an egg needed for building.

    Use pip/wheel to fetch/build a wheel."""
    warnings.warn(
        "setuptools.installer is deprecated. Requirements should "
        "be satisfied by a PEP 517 installer.",
        SetuptoolsDeprecationWarning,
    )
    # Warn if wheel is not available
    try:
        pkg_resources.get_distribution('wheel')
    except pkg_resources.DistributionNotFound:
        dist.announce('WARNING: The wheel package is not available.', log.WARN)
    # Ignore environment markers; if supplied, it is required.
    req = strip_marker(req)
    # Take easy_install options into account, but do not override relevant
    # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
    # take precedence.
    opts = dist.get_option_dict('easy_install')
    if 'allow_hosts' in opts:
        raise DistutilsError('the `allow-hosts` option is not supported '
                             'when using pip to install requirements.')
    quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ
    if 'PIP_INDEX_URL' in os.environ:
        index_url = None
    elif 'index_url' in opts:
        index_url = opts['index_url'][1]
    else:
        index_url = None
    find_links = (
        _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts
        else []
    )
    if dist.dependency_links:
        find_links.extend(dist.dependency_links)
    eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
    environment = pkg_resources.Environment()
    for egg_dist in pkg_resources.find_distributions(eggs_dir):
        if egg_dist in req and environment.can_add(egg_dist):
            return egg_dist
    with tempfile.TemporaryDirectory() as tmpdir:
        cmd = [
            sys.executable, '-m', 'pip',
            '--disable-pip-version-check',
            'wheel', '--no-deps',
            '-w', tmpdir,
        ]
        if quiet:
            cmd.append('--quiet')
        if index_url is not None:
            cmd.extend(('--index-url', index_url))
        for link in find_links or []:
            cmd.extend(('--find-links', link))
        # If requirement is a PEP 508 direct URL, directly pass
        # the URL to pip, as `req @ url` does not work on the
        # command line.
        cmd.append(req.url or str(req))
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            raise DistutilsError(str(e)) from e
        wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
        dist_location = os.path.join(eggs_dir, wheel.egg_name())
        wheel.install_as_egg(dist_location)
        dist_metadata = pkg_resources.PathMetadata(
            dist_location, os.path.join(dist_location, 'EGG-INFO'))
        dist = pkg_resources.Distribution.from_filename(
            dist_location, metadata=dist_metadata)
        return dist
Code Example #8
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = urlparse(
            self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ("http", "https"):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args, dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename, "rb")
        try:
            content = f.read()
        finally:
            f.close()

        meta = self.distribution.metadata
        data = {
            # action
            ":action": "file_upload",
            "protocol_version": "1",
            # identify release
            "name": meta.get_name(),
            "version": meta.get_version(),
            # file content
            "content": (os.path.basename(filename), content),
            "filetype": command,
            "pyversion": pyversion,
            # additional meta-data
            "metadata_version": "1.0",
            "summary": meta.get_description(),
            "home_page": meta.get_url(),
            "author": meta.get_contact(),
            "author_email": meta.get_contact_email(),
            "license": meta.get_licence(),
            "description": meta.get_long_description(),
            "keywords": meta.get_keywords(),
            "platform": meta.get_platforms(),
            "classifiers": meta.get_classifiers(),
            "download_url": meta.get_download_url(),
            # PEP 314
            "provides": meta.get_provides(),
            "requires": meta.get_requires(),
            "obsoletes": meta.get_obsoletes(),
        }

        data["comment"] = ""

        # file content digests
        for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
            if digest_cons is None:
                continue
            try:
                data[digest_name] = digest_cons(content).hexdigest()
            except ValueError:
                # hash digest not available or blocked by security policy
                pass

        if self.sign:
            with open(filename + ".asc", "rb") as f:
                data["gpg_signature"] = (os.path.basename(filename) + ".asc",
                                         f.read())

        # set up the authentication
        user_pass = (self.username + ":" + self.password).encode("ascii")
        # The exact encoding of the authentication string is debated.
        # Anyway PyPI only accepts ascii for both username or password.
        auth = "Basic " + standard_b64encode(user_pass).decode("ascii")

        # Build up the MIME payload for the POST data
        boundary = "--------------GHSKFJDLGDS7543FJKLFHRE75642756743254"
        sep_boundary = b"\r\n--" + boundary.encode("ascii")
        end_boundary = sep_boundary + b"--\r\n"
        body = io.BytesIO()
        for key, value in data.items():
            title = '\r\nContent-Disposition: form-data; name="%s"' % key
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    title += '; filename="%s"' % value[0]
                    value = value[1]
                else:
                    value = str(value).encode("utf-8")
                body.write(sep_boundary)
                body.write(title.encode("utf-8"))
                body.write(b"\r\n\r\n")
                body.write(value)
        body.write(end_boundary)
        body = body.getvalue()

        msg = "Submitting %s to %s" % (filename, self.repository)
        self.announce(msg, log.INFO)

        # build the Request
        headers = {
            "Content-type": "multipart/form-data; boundary=%s" % boundary,
            "Content-length": str(len(body)),
            "Authorization": auth,
        }

        request = Request(self.repository, data=body, headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
        except HTTPError as e:
            status = e.code
            reason = e.msg
        except OSError as e:
            self.announce(str(e), log.ERROR)
            raise

        if status == 200:
            self.announce("Server response (%s): %s" % (status, reason),
                          log.INFO)
            if self.show_response:
                text = self._read_pypi_response(result)
                msg = "\n".join(("-" * 75, text, "-" * 75))
                self.announce(msg, log.INFO)
        else:
            msg = "Upload failed (%s): %s" % (status, reason)
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)
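
The digest loop in Code Example #8 walks a module-level table that the snippet omits; in CPython's distutils/command/upload.py it is defined along these lines:

_FILE_CONTENT_DIGESTS = {
    "md5_digest": getattr(hashlib, "md5", None),
    "sha256_digest": getattr(hashlib, "sha256", None),
    "blake2_256_digest": getattr(hashlib, "blake2b", None),
}

The getattr guards cover builds where a hash is compiled out, and the try/except ValueError above additionally tolerates security policies (for example FIPS mode) that block MD5 at call time.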
Code Example #9
    def build_extensions(self):
        pre_setup.setup()

        make_option = ""
        # To resolve tf-gcc incompatibility
        has_cxx_flag = False
        glibcxx_flag = False
        if not int(os.environ.get('BYTEPS_WITHOUT_TENSORFLOW', 0)):
            try:
                import tensorflow as tf
                make_option += 'ADD_CFLAGS="'
                for flag in tf.sysconfig.get_compile_flags():
                    if 'D_GLIBCXX_USE_CXX11_ABI' in flag:
                        has_cxx_flag = True
                        glibcxx_flag = False if (flag[-1] == '0') else True
                        make_option += flag + ' '
                        break
                make_option += '" '
            except:
                pass

        # To resolve torch-gcc incompatibility
        if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
            try:
                import torch
                torch_flag = torch.compiled_with_cxx11_abi()
                if has_cxx_flag:
                    if glibcxx_flag != torch_flag:
                        raise DistutilsError(
                            '-D_GLIBCXX_USE_CXX11_ABI is not consistent between TensorFlow and PyTorch, '
                            'consider installing them separately.')
                    else:
                        pass
                else:
                    make_option += 'ADD_CFLAGS=-D_GLIBCXX_USE_CXX11_ABI=' + \
                                    str(int(torch_flag)) + ' '
                    has_cxx_flag = True
                    glibcxx_flag = torch_flag
            except:
                pass

        print("build_ucx is", build_ucx())
        if build_ucx():
            ucx_path = pre_setup.ucx_path.strip()
            if not ucx_path:
                ucx_path = "https://codeload.github.com/openucx/ucx/zip/9229f54"
            print("ucx_path is", ucx_path)
            cmd = "sudo apt install -y build-essential libtool autoconf automake libnuma-dev unzip;" +\
            "rm -rf ucx*;" +\
            "curl " + ucx_path + " -o ucx.zip; " + \
                "unzip -o ./ucx.zip -d tmp; " + \
                "rm -rf ucx-build; mkdir -p ucx-build; mv tmp/ucx-*/* ucx-build/;" +\
                "cd ucx-build; pwd; which libtoolize; " + \
                "./autogen.sh; ./autogen.sh && ./contrib/configure-release --enable-mt && make -j && sudo make install -j"
            make_process = subprocess.Popen(cmd,
                                            cwd='3rdparty',
                                            stdout=sys.stdout,
                                            stderr=sys.stderr,
                                            shell=True)
            make_process.communicate()
            if make_process.returncode:
                raise DistutilsSetupError('An ERROR occurred while running the '
                                          'Makefile for the ucx library. '
                                          'Exit code: {0}'.format(
                                              make_process.returncode))

        if not os.path.exists("3rdparty/ps-lite/build/libps.a") or \
           not os.path.exists("3rdparty/ps-lite/deps/lib"):
            if os.environ.get('CI', 'false') == 'false':
                make_option += "-j "
            if has_rdma_header():
                make_option += "USE_RDMA=1 "
            if build_ucx():
                make_option += 'USE_UCX=1 '

            make_option += pre_setup.extra_make_option()

            make_process = subprocess.Popen('make ' + make_option,
                                            cwd='3rdparty/ps-lite',
                                            stdout=sys.stdout,
                                            stderr=sys.stderr,
                                            shell=True)
            make_process.communicate()
            if make_process.returncode:
                raise DistutilsSetupError('An ERROR occurred while running the '
                                          'Makefile for the ps-lite library. '
                                          'Exit code: {0}'.format(
                                              make_process.returncode))

        options = get_common_options(self)
        if has_cxx_flag:
            options['COMPILE_FLAGS'] += [
                '-D_GLIBCXX_USE_CXX11_ABI=' + str(int(glibcxx_flag))
            ]

        built_plugins = []
        try:
            build_server(self, options)
        except:
            raise DistutilsSetupError(
                'An ERROR occurred while building the server module.\n\n'
                '%s' % traceback.format_exc())

        # If PyTorch is installed, it must be imported before others, otherwise
        # we may get an error: dlopen: cannot load any more object with static TLS
        if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
            dummy_import_torch()

        if not int(os.environ.get('BYTEPS_WITHOUT_TENSORFLOW', 0)):
            try:
                build_tf_extension(self, options)
                built_plugins.append(True)
                print('INFO: TensorFlow extension is built successfully.')
            except:
                if not int(os.environ.get('BYTEPS_WITH_TENSORFLOW', 0)):
                    print(
                        'INFO: Unable to build TensorFlow plugin, will skip it.\n\n'
                        '%s' % traceback.format_exc())
                    built_plugins.append(False)
                else:
                    raise
        if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
            try:
                torch_version = check_torch_version()
                build_torch_extension(self, options, torch_version)
                built_plugins.append(True)
                print('INFO: PyTorch extension is built successfully.')
            except:
                if not int(os.environ.get('BYTEPS_WITH_PYTORCH', 0)):
                    print(
                        'INFO: Unable to build PyTorch plugin, will skip it.\n\n'
                        '%s' % traceback.format_exc())
                    built_plugins.append(False)
                else:
                    raise
        if not int(os.environ.get('BYTEPS_WITHOUT_MXNET', 0)):
            # fix "libcuda.so.1 not found" issue
            cuda_home = os.environ.get('BYTEPS_CUDA_HOME', '/usr/local/cuda')
            cuda_stub_path = cuda_home + '/lib64/stubs'
            ln_command = "cd " + cuda_stub_path + "; ln -sf libcuda.so libcuda.so.1"
            os.system(ln_command)
            try:
                build_mx_extension(self, options)
                built_plugins.append(True)
                print('INFO: MXNet extension is built successfully.')
            except:
                if not int(os.environ.get('BYTEPS_WITH_MXNET', 0)):
                    print(
                        'INFO: Unable to build MXNet plugin, will skip it.\n\n'
                        '%s' % traceback.format_exc())
                    built_plugins.append(False)
                else:
                    raise
            finally:
                os.system("rm -rf " + cuda_stub_path + "/libcuda.so.1")

        if not built_plugins:
            print('INFO: Only server module is built.')
            return

        if not any(built_plugins):
            raise DistutilsError(
                'None of TensorFlow, MXNet, PyTorch plugins were built. See errors above.'
            )
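
All of the BYTEPS_* toggles above follow one convention: unset or "0" means "attempt the default", any other integer flips the behavior (WITHOUT_* skips a plugin, WITH_* turns a failed plugin build into a hard error). A tiny helper capturing that reading (illustrative; the original inlines the int(os.environ.get(...)) calls):

import os

def env_flag(name, default=0):
    """True when the variable is set to a non-zero integer."""
    return bool(int(os.environ.get(name, default)))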
Code Example #10
            file.write(
                "from setuptools import setup\n"
                "setup(name=%r, version=%r, py_modules=[%r])\n"
                % (
                    dists[0].project_name, dists[0].version,
                    os.path.splitext(basename)[0]
                )
            )
            file.close()
            return filename

        elif match:
            raise DistutilsError(
                "Can't unambiguously interpret project/version identifier %r; "
                "any dashes in the name or version should be escaped using "
                "underscores. %r" % (fragment,dists)
            )
        else:
            raise DistutilsError(
                "Can't process plain .py files without an '#egg=name-version'"
                " suffix to enable automatic setup script generation."
            )

    dl_blocksize = 8192
    def _download_to(self, url, filename):
        self.info("Downloading %s", url)
        # Download the file
        fp, info = None, None
Code Example #11
]

_SOCKET_TIMEOUT = 15

_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
user_agent = _tmpl.format(
    py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)


def parse_requirement_arg(spec):
    try:
        return Requirement.parse(spec)
    except ValueError as e:
        raise DistutilsError(
            "Not a URL, existing file, or requirement spec: %r" % (spec,)
        ) from e
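
Usage is straightforward; the wrapper exists so a malformed easy_install argument surfaces as a DistutilsError rather than a raw parsing exception (hypothetical calls):

req = parse_requirement_arg('pytest>=6.0')          # returns a Requirement
parse_requirement_arg('http://example.com/pkg.zip') # raises DistutilsError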


def parse_bdist_wininst(name):
    """Return (base,pyversion) or (None,None) for possible .exe name"""

    lower = name.lower()
    base, py_ver, plat = None, None, None
Code Example #12
    def _create_msi(self, dry_run=0):
        """Create the Windows Installer using WiX.

        Creates the Windows Installer using WiX and returns the name of
        the created MSI file.
        """
        # load the upgrade codes
        with open(os.path.join(MSIDATA_ROOT, "upgrade_codes.json")) as fp:
            upgrade_codes = json.load(fp)

        # version variables for Connector/Python and Python
        mycver = self.distribution.metadata.version
        match = re.match(r"(\d+)\.(\d+).(\d+).*", mycver)
        if not match:
            raise ValueError("Failed parsing version from {}".format(mycver))
        (major, minor, patch) = match.groups()
        pyver = self.python_version
        pymajor = pyver[0]
        pyminor = pyver[2]

        # check whether we have an upgrade code
        try:
            upgrade_code = upgrade_codes[mycver[0:3]][pyver]
        except KeyError:
            raise DistutilsError(
                "No upgrade code found for version v{cpy_ver}, "
                "Python v{py_ver}".format(cpy_ver=mycver, py_ver=pyver))
        self.log.info("upgrade code for v%s, Python v%s: %s", mycver, pyver,
                      upgrade_code)

        self.pyver_bdist_paths = self._find_bdist_paths()

        # wixobj's basename is the name of the installer
        wixobj = self._get_wixobj_name()
        msi = os.path.abspath(
            os.path.join(self.dist_dir, wixobj.replace(".wixobj", ".msi")))
        wixer = wix.WiX(self._wxs,
                        out=wixobj,
                        msi_out=msi,
                        base_path=self.build_base,
                        install=self.wix_install)

        # correct newlines and version in text files
        self.log.info("Fixing newlines in text files")
        info_files = []
        for txt_file_dest, txt_file_path in self._fix_txt_files.items():
            txt_fixed = os.path.join(self.build_base, txt_file_dest)
            info_files.append(txt_fixed)
            content = open(txt_file_path, "rb").read()

            if b"\r\n" not in content:
                self.log.info("converting newlines in %s", txt_fixed)
                content = content.replace(b"\n", b"\r\n")
                open(txt_fixed, "wb").write(content)
            else:
                self.log.info("not converting newlines in %s, this is odd",
                              txt_fixed)
                open(txt_fixed, "wb").write(content)

        digit_needle = r"Connector/Python \d{1,2}.\d{1,2}"
        xy_needle = "Connector/Python X.Y"
        xy_sub = "Connector/Python {0}.{1}"
        for info_file in info_files:
            self.log.info("correcting version in %s", info_file)
            with open(info_file, "r+") as fp:
                content = fp.readlines()
                for idx, line in enumerate(content):
                    content[idx] = re.sub(digit_needle,
                                          xy_sub.format(*VERSION[0:2]), line)
                    line = content[idx]
                    content[idx] = re.sub(xy_needle,
                                          xy_sub.format(*VERSION[0:2]), line)
                fp.seek(0)
                fp.write("".join(content))

        plat_type = "x64" if ARCH_64BIT else "x86"
        win64 = "yes" if ARCH_64BIT else "no"
        pyd_arch = "win_amd64" if ARCH_64BIT else "win32"
        directory_id = "ProgramFiles64Folder" if ARCH_64BIT else \
                       "ProgramFilesFolder"

        # For 3.5 the driver names are pretty complex, see
        # https://www.python.org/dev/peps/pep-0425/
        if pymajor == "3" and int(pyminor) >= 5:
            pyd_ext = ".cp%s%s-%s.pyd" % (pyver[0], 5, pyd_arch)
        else:
            pyd_ext = ".pyd"

        if self._connc_lib:
            if ARCH_64BIT:
                libcrypto_dll_path = os.path.join(
                    os.path.abspath(self._connc_lib), "libcrypto-1_1-x64.dll")
                libssl_dll_path = os.path.join(
                    os.path.abspath(self._connc_lib), "libssl-1_1-x64.dll")
            else:
                libcrypto_dll_path = os.path.join(
                    os.path.abspath(self._connc_lib), "libcrypto-1_1.dll")
                libssl_dll_path = os.path.join(
                    os.path.abspath(self._connc_lib), "libssl-1_1.dll")
        else:
            libcrypto_dll_path = ""
            libssl_dll_path = ""

        # WiX preprocessor variables
        params = {
            "Version":
            ".".join([major, minor, patch]),
            "FullVersion":
            mycver,
            "PythonVersion":
            pyver,
            "PythonMajor":
            pymajor,
            "PythonMinor":
            pyminor,
            "Major_Version":
            major,
            "Minor_Version":
            minor,
            "Patch_Version":
            patch,
            "Platform":
            plat_type,
            "Directory_Id":
            directory_id,
            "PythonInstallDir":
            "Python%s" % pyver.replace(".", ""),
            "PyExt":
            "pyc" if self.byte_code_only else "py",
            "ManualPDF":
            os.path.abspath(os.path.join("docs",
                                         "mysql-connector-python.pdf")),
            "ManualHTML":
            os.path.abspath(os.path.join("docs",
                                         "mysql-connector-python.html")),
            "UpgradeCode":
            upgrade_code,
            "MagicTag":
            get_magic_tag(),
            "BuildDir":
            os.path.abspath(self.build_base),
            "LibMySQLDLL":
            os.path.join(os.path.abspath(self._connc_lib), "libmysql.dll")
            if self._connc_lib else "",
            "LIBcryptoDLL":
            libcrypto_dll_path,
            "LIBSSLDLL":
            libssl_dll_path,
            "Win64":
            win64,
            "BitmapDir":
            os.path.join(os.getcwd(), "cpydist", "data", "msi"),
        }
        for py_ver in self._supported_versions:
            ver = py_ver.split(".")
            params["BDist{}{}".format(*ver)] = ""

            if ver[0] == '3' and int(ver[1]) >= 5:
                pyd_ext = ".cp%s%s-%s.pyd" % (ver[0], ver[1], pyd_arch)
            else:
                pyd_ext = ".pyd"

            params["CExtLibName{}{}".format(*ver)] = \
                "_mysql_connector{}".format(pyd_ext)
            params["CExtXPBName{}{}".format(*ver)] = \
                "_mysqlxpb{}".format(pyd_ext)
            params["HaveCExt{}{}".format(*ver)] = 0
            params["HaveLdapLibs{}{}".format(*ver)] = 0

            if py_ver in self.pyver_bdist_paths:
                params["BDist{}{}".format(*ver)] = \
                   self.pyver_bdist_paths[py_ver]
                if os.path.exists(
                        os.path.join(self.pyver_bdist_paths[py_ver],
                                     params["CExtLibName{}{}".format(*ver)])):
                    params["HaveCExt{}{}".format(*ver)] = 1
                if os.path.exists(
                        os.path.join(self.pyver_bdist_paths[py_ver], "mysql",
                                     "vendor",
                                     "authentication_ldap_sasl_client.dll")):
                    params["HaveLdapLibs{}{}".format(*ver)] = 1

        self.log.info("### wixer params:")
        for param in params:
            self.log.info("  %s: %s", param, params[param])
        wixer.set_parameters(params)

        if not dry_run:
            try:
                wixer.compile()
                wixer.link()
            except DistutilsError:
                raise

        if not self.keep_temp and not dry_run:
            self.log.info("WiX: cleaning up")
            os.unlink(msi.replace(".msi", ".wixpdb"))

        return msi
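
The lookup upgrade_codes[mycver[0:3]][pyver] implies that upgrade_codes.json is keyed first by the Connector/Python release series and then by Python version; a hypothetical shape (GUIDs elided), shown as the Python dict json.load would return:

upgrade_codes = {
    "8.0": {                          # mycver[0:3], the release series
        "3.8": "<upgrade-code GUID>",
        "3.9": "<upgrade-code GUID>",
    },
}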
Code Example #13
File: setup.py, Project: yss4/horovod
def get_common_options(build_ext):
    cpp_flags = get_cpp_flags(build_ext)
    link_flags = get_link_flags(build_ext)
    mpi_flags = get_mpi_flags()

    gpu_allreduce = os.environ.get('HOROVOD_GPU_ALLREDUCE')
    if gpu_allreduce and gpu_allreduce != 'MPI' and gpu_allreduce != 'NCCL' and \
       gpu_allreduce != 'DDL':
        raise DistutilsError('HOROVOD_GPU_ALLREDUCE=%s is invalid, supported '
                             'values are "", "MPI", "NCCL", "DDL".' %
                             gpu_allreduce)

    gpu_allgather = os.environ.get('HOROVOD_GPU_ALLGATHER')
    if gpu_allgather and gpu_allgather != 'MPI':
        raise DistutilsError('HOROVOD_GPU_ALLGATHER=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_allgather)

    gpu_broadcast = os.environ.get('HOROVOD_GPU_BROADCAST')
    if gpu_broadcast and gpu_broadcast != 'MPI':
        raise DistutilsError('HOROVOD_GPU_BROADCAST=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_broadcast)

    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext, cpp_flags)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []

    if gpu_allreduce == 'NCCL':
        have_nccl = True
        nccl_include_dirs, nccl_lib_dirs, nccl_libs = get_nccl_vals(
            build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags)
    else:
        have_nccl = False
        nccl_include_dirs = nccl_lib_dirs = nccl_libs = []

    if gpu_allreduce == 'DDL':
        have_ddl = True
        ddl_include_dirs, ddl_lib_dirs = get_ddl_dirs()
    else:
        have_ddl = False
        ddl_include_dirs = ddl_lib_dirs = []

    if (gpu_allreduce == 'NCCL'
            and (gpu_allgather == 'MPI' or gpu_broadcast == 'MPI')
            and not os.environ.get('HOROVOD_ALLOW_MIXED_GPU_IMPL')):
        raise DistutilsError(
            'You should not mix NCCL and MPI GPU due to a possible deadlock.\n'
            'If you\'re sure you want to mix them, set the '
            'HOROVOD_ALLOW_MIXED_GPU_IMPL environment variable to \'1\'.')

    MACROS = []
    INCLUDES = []
    SOURCES = [
        'horovod/common/common.cc', 'horovod/common/mpi_message.cc',
        'horovod/common/operations.cc', 'horovod/common/timeline.cc'
    ]
    COMPILE_FLAGS = cpp_flags + shlex.split(mpi_flags)
    LINK_FLAGS = link_flags + shlex.split(mpi_flags)
    LIBRARY_DIRS = []
    LIBRARIES = []

    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        LIBRARY_DIRS += cuda_lib_dirs
        LIBRARIES += ['cudart']

    if have_nccl:
        MACROS += [('HAVE_NCCL', '1')]
        INCLUDES += nccl_include_dirs
        LIBRARY_DIRS += nccl_lib_dirs
        LIBRARIES += nccl_libs

    if have_ddl:
        MACROS += [('HAVE_DDL', '1')]
        INCLUDES += ddl_include_dirs
        LIBRARY_DIRS += ddl_lib_dirs
        LIBRARIES += ['ddl', 'ddl_pack']

    if gpu_allreduce:
        MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]

    if gpu_allgather:
        MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]

    if gpu_broadcast:
        MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]

    return dict(MACROS=MACROS,
                INCLUDES=INCLUDES,
                SOURCES=SOURCES,
                COMPILE_FLAGS=COMPILE_FLAGS,
                LINK_FLAGS=LINK_FLAGS,
                LIBRARY_DIRS=LIBRARY_DIRS,
                LIBRARIES=LIBRARIES)
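
The returned dict feeds a later build step that copies each entry onto an Extension, mirroring the attribute assignments at the end of Code Example #6; a sketch of that application (helper name is illustrative):

def apply_options(ext, options):
    """Copy a get_common_options() result onto a setuptools Extension."""
    ext.define_macros = options['MACROS']
    ext.include_dirs = options['INCLUDES']
    ext.sources = options['SOURCES']
    ext.extra_compile_args = options['COMPILE_FLAGS']
    ext.extra_link_args = options['LINK_FLAGS']
    ext.library_dirs = options['LIBRARY_DIRS']
    ext.libraries = options['LIBRARIES']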
Code Example #14
        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': str(len(body)),
            'Authorization': auth
        }

        request = Request(self.repository, data=body, headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            raise
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (status, reason)
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)
Code Example #15
File: setup.py, Project: houyb/py2exe2msi
import os, sys

from distutils.core import setup
from distutils.errors import DistutilsError

from py2exe2msi import __version__ as VERSION

PACKAGE_NAME = 'py2exe2msi'

if sys.version_info < (2, 6):
    raise DistutilsError('this package requires Python 2.6 or later')

setup(
    name=PACKAGE_NAME,
    version=VERSION,
    description=
    'An easy way to create Windows standalone applications in Python',
    author='Artem Andreev',
    author_email='*****@*****.**',
    url='http://code.google.com/p/py2exe2msi/',
    packages=['py2exe2msi'],
    long_description='''py2exe2msi is an extension to distutils which creates 
	MSI packages for py2exe-compiled applications''',
    classifiers=[
        'License :: OSI Approved', 'License :: OSI Approved :: BSD License',
        'Programming Language :: Python', 'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers', 'Operating System :: Microsoft',
        'Operating System :: Microsoft :: Windows',
        'Topic :: Software Development',
        'Topic :: Software Development :: Build Tools', 'Topic :: System',
        'Topic :: System :: Archiving',
Code Example #16
 def run(self):
     raise DistutilsError("not supported on this version of python")
Code Example #17
File: setup.py, Project: danielsocials/python-trezor
 def run(self):
     raise DistutilsError(self.description)
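
Stub run() methods like the two above are the usual way to hard-disable a distutils command: registering the class under cmdclass makes the failure explicit instead of silently doing nothing. A hypothetical registration:

from distutils.core import Command, setup
from distutils.errors import DistutilsError

class disabled_upload(Command):
    description = 'uploading is disabled for this package'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        raise DistutilsError(self.description)

setup(name='example', version='0', cmdclass={'upload': disabled_upload})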
Code Example #18
        def run(self):
            import subprocess
            import shutil
            import zipfile
            import os
            import sys
            import socket
            import urllib
            import StringIO
            from base64 import standard_b64encode
            import httplib
            import urlparse

            # Extract the package name from distutils metadata
            meta = self.distribution.metadata
            name = meta.get_name()

            # Run sphinx
            if os.path.exists('doc/_build'):
                shutil.rmtree('doc/_build')
            os.mkdir('doc/_build')

            p = subprocess.Popen(['make', 'html'], cwd='doc')
            exit = p.wait()
            if exit != 0:
                raise DistutilsError("sphinx-build failed")

            # Collect sphinx output
            if not os.path.exists('dist'):
                os.mkdir('dist')
            zf = zipfile.ZipFile('dist/%s-docs.zip' % (name, ),
                                 'w',
                                 compression=zipfile.ZIP_DEFLATED)

            for toplevel, dirs, files in os.walk('doc/_build/html'):
                for fn in files:
                    fullname = os.path.join(toplevel, fn)
                    relname = os.path.relpath(fullname, 'doc/_build/html')

                    print("%s -> %s" % (fullname, relname))

                    zf.write(fullname, relname)

            zf.close()

            # Upload the results, this code is based on the distutils
            # 'upload' command.
            content = open('dist/%s-docs.zip' % (name, ), 'rb').read()

            data = {
                ':action': 'doc_upload',
                'name': name,
                'content': ('%s-docs.zip' % (name, ), content),
            }
            auth = "Basic " + standard_b64encode(self.username + ":" +
                                                 self.password)

            boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
            sep_boundary = '\n--' + boundary
            end_boundary = sep_boundary + '--'
            body = StringIO.StringIO()
            for key, value in data.items():
                if not isinstance(value, list):
                    value = [value]

                for value in value:
                    if isinstance(value, tuple):
                        fn = ';filename="%s"' % (value[0])
                        value = value[1]
                    else:
                        fn = ''

                    body.write(sep_boundary)
                    body.write('\nContent-Disposition: form-data; name="%s"' %
                               key)
                    body.write(fn)
                    body.write("\n\n")
                    body.write(value)

            body.write(end_boundary)
            body.write('\n')
            body = body.getvalue()

            self.announce(
                "Uploading documentation to %s" % (self.repository, ),
                log.INFO)

            schema, netloc, url, params, query, fragments = \
                    urlparse.urlparse(self.repository)

            if schema == 'http':
                http = httplib.HTTPConnection(netloc)
            elif schema == 'https':
                http = httplib.HTTPSConnection(netloc)
            else:
                raise AssertionError("unsupported schema " + schema)

            data = ''
            loglevel = log.INFO
            try:
                http.connect()
                http.putrequest("POST", url)
                http.putheader('Content-type',
                               'multipart/form-data; boundary=%s' % boundary)
                http.putheader('Content-length', str(len(body)))
                http.putheader('Authorization', auth)
                http.endheaders()
                http.send(body)
            except socket.error:
                e = sys.exc_info()[1]
                self.announce(str(e), log.ERROR)
                return

            r = http.getresponse()
            if r.status in (200, 301):
                self.announce(
                    'Upload succeeded (%s): %s' % (r.status, r.reason),
                    log.INFO)
            else:
                self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                              log.ERROR)

                print('-' * 75)
                print(r.read())
                print('-' * 75)
Code Example #19
    return sub.returncode, stdout, stderr


def distutils_exec_process(cmdline, silent, input=None, **kwargs):
    try:
        returncode, stdout, stderr = exec_process(cmdline, silent, input,
                                                  **kwargs)
    except OSError as e:
        if e.errno == 2:
            raise DistutilsError('"%s" is not present on this system' %
                                 cmdline[0])
        else:
            raise
    if returncode != 0:
        raise DistutilsError(
            'Got return value %d while executing "%s", stderr output was:\n%s'
            % (returncode, " ".join(cmdline), stderr.rstrip("\n")))
    return stdout


def get_makefile_variables(makefile):
    """Returns all variables in a makefile as a dict"""
    stdout = distutils_exec_process(
        [get_make_cmd(), "-f", makefile, "-pR", makefile], True)
    return dict(tup for tup in re.findall(r"(^[a-zA-Z]\w+)\s*:?=\s*(.*)$",
                                          stdout, re.MULTILINE))
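
A hypothetical invocation of the helper above, assuming a Makefile in the working directory:

# Hypothetical usage: dump make's variable database and look up one entry.
variables = get_makefile_variables('Makefile')
print(variables.get('CC', '(CC not set)'))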


def get_svn_repo_url(svn_dir):
    environment = dict((name, value) for name, value in os.environ.items()
                       if name != 'LANG' and not name.startswith('LC_'))
Code Example #20
    def prepare_static_build_linux(self):
        self.openssl_version = os.environ.get('PYXMLSEC_OPENSSL_VERSION', '1.1.1g')
        self.libiconv_version = os.environ.get('PYXMLSEC_LIBICONV_VERSION', '1.16')
        self.libxml2_version = os.environ.get('PYXMLSEC_LIBXML2_VERSION', None)
        self.libxslt_version = os.environ.get('PYXMLSEC_LIBXLST_VERSION', None)
        self.zlib_version = os.environ.get('PYXMLSEC_ZLIB_VERSION', '1.2.11')
        self.xmlsec1_version = os.environ.get('PYXMLSEC_XMLSEC1_VERSION', '1.2.31')

        self.info('Settings:')
        self.info('{:20} {}'.format('Lib sources in:', self.libs_dir.absolute()))
        self.info('{:20} {}'.format('zlib version:', self.zlib_version))
        self.info('{:20} {}'.format('libiconv version:', self.libiconv_version))
        self.info('{:20} {}'.format('libxml2 version:', self.libxml2_version or 'unset, using latest'))
        self.info('{:20} {}'.format('libxslt version:', self.libxslt_version or 'unset, using latest'))
        self.info('{:20} {}'.format('xmlsec1 version:', self.xmlsec1_version))

        # fetch openssl
        openssl_tar = next(self.libs_dir.glob('openssl*.tar.gz'), None)
        if openssl_tar is None:
            self.info('OpenSSL source tar not found, downloading ...')
            openssl_tar = self.libs_dir / 'openssl.tar.gz'
            urlretrieve('https://www.openssl.org/source/openssl-{}.tar.gz'.format(self.openssl_version), str(openssl_tar))

        # fetch zlib
        zlib_tar = next(self.libs_dir.glob('zlib*.tar.gz'), None)
        if zlib_tar is None:
            self.info('zlib source tar not found, downloading ...')
            zlib_tar = self.libs_dir / 'zlib.tar.gz'
            urlretrieve('https://zlib.net/zlib-{}.tar.gz'.format(self.zlib_version), str(zlib_tar))

        # fetch libiconv
        libiconv_tar = next(self.libs_dir.glob('libiconv*.tar.gz'), None)
        if libiconv_tar is None:
            self.info('libiconv source tar not found, downloading ...')
            libiconv_tar = self.libs_dir / 'libiconv.tar.gz'
            urlretrieve(
                'https://ftp.gnu.org/pub/gnu/libiconv/libiconv-{}.tar.gz'.format(self.libiconv_version), str(libiconv_tar)
            )

        # fetch libxml2
        libxml2_tar = next(self.libs_dir.glob('libxml2*.tar.gz'), None)
        if libxml2_tar is None:
            self.info('Libxml2 source tar not found, downloading ...')
            if self.libxml2_version is None:
                url = 'http://xmlsoft.org/sources/LATEST_LIBXML2'
            else:
                url = 'http://xmlsoft.org/sources/libxml2-{}.tar.gz'.format(self.libxml2_version)
            libxml2_tar = self.libs_dir / 'libxml2.tar.gz'
            urlretrieve(url, str(libxml2_tar))

        # fetch libxslt
        libxslt_tar = next(self.libs_dir.glob('libxslt*.tar.gz'), None)
        if libxslt_tar is None:
            self.info('libxslt source tar not found, downloading ...')
            if self.libxslt_version is None:
                url = 'http://xmlsoft.org/sources/LATEST_LIBXSLT'
            else:
                url = 'http://xmlsoft.org/sources/libxslt-{}.tar.gz'.format(self.libxslt_version)
            libxslt_tar = self.libs_dir / 'libxslt.tar.gz'
            urlretrieve(url, str(libxslt_tar))

        # fetch xmlsec1
        xmlsec1_tar = next(self.libs_dir.glob('xmlsec1*.tar.gz'), None)
        if xmlsec1_tar is None:
            self.info('xmlsec1 source tar not found, downloading ...')
            url = 'http://www.aleksey.com/xmlsec/download/xmlsec1-{}.tar.gz'.format(self.xmlsec1_version)
            xmlsec1_tar = self.libs_dir / 'xmlsec1.tar.gz'
            urlretrieve(url, str(xmlsec1_tar))

        for file in (openssl_tar, zlib_tar, libiconv_tar, libxml2_tar, libxslt_tar, xmlsec1_tar):
            self.info('Unpacking {}'.format(file.name))
            try:
                with tarfile.open(str(file)) as tar:
                    tar.extractall(path=str(self.build_libs_dir))
            except EOFError:
                raise DistutilsError('Bad {} downloaded; remove it and try again.'.format(file.name))

        prefix_arg = '--prefix={}'.format(self.prefix_dir)

        cflags = ['-fPIC']
        env = os.environ.copy()
        if 'CFLAGS' in env:
            env['CFLAGS'] += ' ' + ' '.join(cflags)
        else:
            env['CFLAGS'] = ' '.join(cflags)

        self.info('Building OpenSSL')
        openssl_dir = next(self.build_libs_dir.glob('openssl-*'))
        subprocess.check_output(['./config', prefix_arg, 'no-shared', '-fPIC'], cwd=str(openssl_dir), env=env)
        subprocess.check_output(['make', '-j{}'.format(multiprocessing.cpu_count() + 1)], cwd=str(openssl_dir), env=env)
        subprocess.check_output(
            ['make', '-j{}'.format(multiprocessing.cpu_count() + 1), 'install_sw'], cwd=str(openssl_dir), env=env
        )

        self.info('Building zlib')
        zlib_dir = next(self.build_libs_dir.glob('zlib-*'))
        subprocess.check_output(['./configure', prefix_arg], cwd=str(zlib_dir), env=env)
        subprocess.check_output(['make', '-j{}'.format(multiprocessing.cpu_count() + 1)], cwd=str(zlib_dir), env=env)
        subprocess.check_output(['make', '-j{}'.format(multiprocessing.cpu_count() + 1), 'install'], cwd=str(zlib_dir), env=env)

        self.info('Building libiconv')
        libiconv_dir = next(self.build_libs_dir.glob('libiconv-*'))
        subprocess.check_output(
            ['./configure', prefix_arg, '--disable-dependency-tracking', '--disable-shared'], cwd=str(libiconv_dir), env=env
        )
        subprocess.check_output(['make', '-j{}'.format(multiprocessing.cpu_count() + 1)], cwd=str(libiconv_dir), env=env)
        subprocess.check_output(
            ['make', '-j{}'.format(multiprocessing.cpu_count() + 1), 'install'], cwd=str(libiconv_dir), env=env
        )

        self.info('Building LibXML2')
        libxml2_dir = next(self.build_libs_dir.glob('libxml2-*'))
        subprocess.check_output(
            [
                './configure',
                prefix_arg,
                '--disable-dependency-tracking',
                '--disable-shared',
                '--enable-rebuild-docs=no',
                '--without-lzma',
                '--without-python',
                '--with-iconv={}'.format(self.prefix_dir),
                '--with-zlib={}'.format(self.prefix_dir),
            ],
            cwd=str(libxml2_dir),
            env=env,
        )
        subprocess.check_output(['make', '-j{}'.format(multiprocessing.cpu_count() + 1)], cwd=str(libxml2_dir), env=env)
        subprocess.check_output(
            ['make', '-j{}'.format(multiprocessing.cpu_count() + 1), 'install'], cwd=str(libxml2_dir), env=env
        )

        self.info('Building libxslt')
        libxslt_dir = next(self.build_libs_dir.glob('libxslt-*'))
        subprocess.check_output(
            [
                './configure',
                prefix_arg,
                '--disable-dependency-tracking',
                '--disable-shared',
                '--without-python',
                '--without-crypto',
                '--with-libxml-prefix={}'.format(self.prefix_dir),
            ],
            cwd=str(libxslt_dir),
            env=env,
        )
        subprocess.check_output(['make', '-j{}'.format(multiprocessing.cpu_count() + 1)], cwd=str(libxslt_dir), env=env)
        subprocess.check_output(
            ['make', '-j{}'.format(multiprocessing.cpu_count() + 1), 'install'], cwd=str(libxslt_dir), env=env
        )

        self.info('Building xmlsec1')
        if 'LDFLAGS' in env:
            env['LDFLAGS'] += ' -lpthread'
        else:
            env['LDFLAGS'] = '-lpthread'
        xmlsec1_dir = next(self.build_libs_dir.glob('xmlsec1-*'))
        subprocess.check_output(
            [
                './configure',
                prefix_arg,
                '--disable-shared',
                '--disable-gost',
                '--disable-crypto-dl',
                '--enable-static=yes',
                '--enable-shared=no',
                '--enable-static-linking=yes',
                '--with-default-crypto=openssl',
                '--with-openssl={}'.format(self.prefix_dir),
                '--with-libxml={}'.format(self.prefix_dir),
                '--with-libxslt={}'.format(self.prefix_dir),
            ],
            cwd=str(xmlsec1_dir),
            env=env,
        )
        subprocess.check_output(
            ['make', '-j{}'.format(multiprocessing.cpu_count() + 1)]
            + ['-I{}'.format(str(self.prefix_dir / 'include')), '-I{}'.format(str(self.prefix_dir / 'include' / 'libxml'))],
            cwd=str(xmlsec1_dir),
            env=env,
        )
        subprocess.check_output(
            ['make', '-j{}'.format(multiprocessing.cpu_count() + 1), 'install'], cwd=str(xmlsec1_dir), env=env
        )

        ext = self.ext_map['xmlsec']
        ext.define_macros = [
            ('__XMLSEC_FUNCTION__', '__func__'),
            ('XMLSEC_NO_SIZE_T', None),
            ('XMLSEC_NO_GOST', '1'),
            ('XMLSEC_NO_GOST2012', '1'),
            ('XMLSEC_NO_XKMS', '1'),
            ('XMLSEC_CRYPTO', '\\"openssl\\"'),
            ('XMLSEC_NO_CRYPTO_DYNAMIC_LOADING', '1'),
            ('XMLSEC_CRYPTO_OPENSSL', '1'),
            ('LIBXML_ICONV_ENABLED', 1),
            ('LIBXML_STATIC', '1'),
            ('LIBXSLT_STATIC', '1'),
            ('XMLSEC_STATIC', '1'),
            ('inline', '__inline'),
            ('UNICODE', '1'),
            ('_UNICODE', '1'),
        ]

        ext.include_dirs.append(str(self.prefix_dir / 'include'))
        ext.include_dirs.extend([str(p.absolute()) for p in (self.prefix_dir / 'include').iterdir() if p.is_dir()])

        ext.library_dirs = []
        ext.libraries = ['m', 'rt']
        extra_objects = [
            'libxmlsec1.a',
            'libxslt.a',
            'libxml2.a',
            'libz.a',
            'libxmlsec1-openssl.a',
            'libcrypto.a',
            'libiconv.a',
            'libxmlsec1.a',
        ]
        ext.extra_objects = [str(self.prefix_dir / 'lib' / o) for o in extra_objects]
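
Each dependency above repeats the same configure / make / make install sequence, so the steps could live in one helper. A sketch under the same assumptions (autoconf-style source trees, env prepared as above); it is a refactoring suggestion, not part of the original build:

import multiprocessing
import subprocess

def autotools_build(src_dir, env, configure_args=(), install_target='install'):
    # Same pattern as the per-library blocks above: configure, parallel
    # build, then install into the shared prefix.
    jobs = '-j{}'.format(multiprocessing.cpu_count() + 1)
    subprocess.check_output(['./configure'] + list(configure_args),
                            cwd=str(src_dir), env=env)
    subprocess.check_output(['make', jobs], cwd=str(src_dir), env=env)
    subprocess.check_output(['make', jobs, install_target],
                            cwd=str(src_dir), env=env)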
Code Example #21
 def run(self):
     if not has_system_lib():
         raise DistutilsError(
             "This library is not usable in 'develop' mode when using the "
             'bundled libsecp256k1. See README for details.')
     _develop.run(self)
Code Example #22
    def run(self):
        ext = self.ext_map['xmlsec']
        self.debug = os.environ.get('PYXMLSEC_ENABLE_DEBUG', False)
        self.static = os.environ.get('PYXMLSEC_STATIC_DEPS', False)

        if self.static or sys.platform == 'win32':
            self.info('starting static build on {}'.format(sys.platform))
            buildroot = Path('build', 'tmp')

            self.prefix_dir = buildroot / 'prefix'
            self.prefix_dir.mkdir(parents=True, exist_ok=True)
            self.prefix_dir = self.prefix_dir.absolute()

            self.build_libs_dir = buildroot / 'libs'
            self.build_libs_dir.mkdir(exist_ok=True)

            self.libs_dir = Path(os.environ.get('PYXMLSEC_LIBS_DIR', 'libs'))
            self.libs_dir.mkdir(exist_ok=True)

            if sys.platform == 'win32':
                self.prepare_static_build_win()
            elif 'linux' in sys.platform:
                self.prepare_static_build_linux()
        else:
            import pkgconfig

            try:
                config = pkgconfig.parse('xmlsec1')
            except EnvironmentError:
                raise DistutilsError('Unable to invoke pkg-config.')
            except pkgconfig.PackageNotFoundError:
                raise DistutilsError('xmlsec1 is not installed or not in path.')

            if config is None or not config.get('libraries'):
                raise DistutilsError('Bad or incomplete result returned from pkg-config.')

            ext.define_macros.extend(config['define_macros'])
            ext.include_dirs.extend(config['include_dirs'])
            ext.library_dirs.extend(config['library_dirs'])
            ext.libraries.extend(config['libraries'])

        import lxml

        ext.include_dirs.extend(lxml.get_include())

        ext.define_macros.extend(
            [('MODULE_NAME', self.distribution.metadata.name), ('MODULE_VERSION', self.distribution.metadata.version)]
        )
        # escape the XMLSEC_CRYPTO macro value, see mehcode/python-xmlsec#141
        for (key, value) in ext.define_macros:
            if key == 'XMLSEC_CRYPTO' and not (value.startswith('"') and value.endswith('"')):
                ext.define_macros.remove((key, value))
                ext.define_macros.append((key, '"{0}"'.format(value)))
                break

        if sys.platform == 'win32':
            ext.extra_compile_args.append('/Zi')
        else:
            ext.extra_compile_args.extend(
                [
                    '-g',
                    '-std=c99',
                    '-fPIC',
                    '-fno-strict-aliasing',
                    '-Wno-error=declaration-after-statement',
                    '-Werror=implicit-function-declaration',
                ]
            )

        if self.debug:
            ext.extra_compile_args.append('-Wall')
            ext.extra_compile_args.append('-O0')
            ext.define_macros.append(('PYXMLSEC_ENABLE_DEBUG', '1'))
        else:
            ext.extra_compile_args.append('-Os')

        super(build_ext, self).run()
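
The non-static branch leans on the third-party pkgconfig package. A minimal sketch of how its parse() output maps onto an Extension, assuming xmlsec1's .pc file is discoverable; the source list is hypothetical:

import pkgconfig
from setuptools import Extension

# pkgconfig.parse returns a dict whose 'include_dirs', 'library_dirs',
# 'libraries' and 'define_macros' keys mirror Extension's arguments.
config = pkgconfig.parse('xmlsec1')
ext = Extension(
    'xmlsec',
    sources=['src/main.c'],  # hypothetical
    include_dirs=config['include_dirs'],
    library_dirs=config['library_dirs'],
    libraries=config['libraries'],
    define_macros=config['define_macros'],
)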
Code Example #23
File: build_src.py  Project: tongjuhua/FlipbookApp
    def f2py_sources(self, sources, extension):
        new_sources = []
        f2py_sources = []
        f_sources = []
        f2py_targets = {}
        target_dirs = []
        ext_name = extension.name.split('.')[-1]
        skip_f2py = 0

        for source in sources:
            (base, ext) = os.path.splitext(source)
            if ext == '.pyf':  # F2PY interface file
                if self.inplace:
                    target_dir = os.path.dirname(base)
                else:
                    target_dir = appendpath(self.build_src,
                                            os.path.dirname(base))
                if os.path.isfile(source):
                    name = get_f2py_modulename(source)
                    if name != ext_name:
                        raise DistutilsSetupError(
                            'mismatch of extension names: %s '
                            'provides %r but expected %r' %
                            (source, name, ext_name))
                    target_file = os.path.join(target_dir, name + 'module.c')
                else:
                    log.debug('  source %s does not exist: skipping f2py\'ing.' \
                              % (source))
                    name = ext_name
                    skip_f2py = 1
                    target_file = os.path.join(target_dir, name + 'module.c')
                    if not os.path.isfile(target_file):
                        log.warn('  target %s does not exist:\n   '\
                                 'Assuming %smodule.c was generated with '\
                                 '"build_src --inplace" command.' \
                                 % (target_file, name))
                        target_dir = os.path.dirname(base)
                        target_file = os.path.join(target_dir,
                                                   name + 'module.c')
                        if not os.path.isfile(target_file):
                            raise DistutilsSetupError("%r missing" %
                                                      (target_file, ))
                        log.info('   Yes! Using %r as up-to-date target.' \
                                 % (target_file))
                target_dirs.append(target_dir)
                f2py_sources.append(source)
                f2py_targets[source] = target_file
                new_sources.append(target_file)
            elif fortran_ext_match(ext):
                f_sources.append(source)
            else:
                new_sources.append(source)

        if not (f2py_sources or f_sources):
            return new_sources

        for d in target_dirs:
            self.mkpath(d)

        f2py_options = extension.f2py_options + self.f2py_opts

        if self.distribution.libraries:
            for name, build_info in self.distribution.libraries:
                if name in extension.libraries:
                    f2py_options.extend(build_info.get('f2py_options', []))

        log.info("f2py options: %s" % (f2py_options))

        if f2py_sources:
            if len(f2py_sources) != 1:
                raise DistutilsSetupError(
                    'only one .pyf file is allowed per extension module but got'\
                    ' more: %r' % (f2py_sources,))
            source = f2py_sources[0]
            target_file = f2py_targets[source]
            target_dir = os.path.dirname(target_file) or '.'
            depends = [source] + extension.depends
            if (self.force or newer_group(depends, target_file,'newer')) \
                   and not skip_f2py:
                log.info("f2py: %s" % (source))
                import numpy.f2py
                numpy.f2py.run_main(f2py_options +
                                    ['--build-dir', target_dir, source])
            else:
                log.debug("  skipping '%s' f2py interface (up-to-date)" %
                          (source))
        else:
            #XXX TODO: --inplace support for sdist command
            if is_sequence(extension):
                name = extension[0]
            else:
                name = extension.name
            target_dir = os.path.join(*([self.build_src]\
                                        +name.split('.')[:-1]))
            target_file = os.path.join(target_dir, ext_name + 'module.c')
            new_sources.append(target_file)
            depends = f_sources + extension.depends
            if (self.force or newer_group(depends, target_file, 'newer')) \
                   and not skip_f2py:
                log.info("f2py:> %s" % (target_file))
                self.mkpath(target_dir)
                import numpy.f2py
                numpy.f2py.run_main(f2py_options + ['--lower',
                                                '--build-dir',target_dir]+\
                                ['-m',ext_name]+f_sources)
            else:
                log.debug("  skipping f2py fortran files for '%s' (up-to-date)"\
                          % (target_file))

        if not os.path.isfile(target_file):
            raise DistutilsError("f2py target file %r not generated" %
                                 (target_file, ))

        target_c = os.path.join(self.build_src, 'fortranobject.c')
        target_h = os.path.join(self.build_src, 'fortranobject.h')
        log.info("  adding '%s' to sources." % (target_c))
        new_sources.append(target_c)
        if self.build_src not in extension.include_dirs:
            log.info("  adding '%s' to include_dirs." \
                     % (self.build_src))
            extension.include_dirs.append(self.build_src)

        if not skip_f2py:
            import numpy.f2py
            d = os.path.dirname(numpy.f2py.__file__)
            source_c = os.path.join(d, 'src', 'fortranobject.c')
            source_h = os.path.join(d, 'src', 'fortranobject.h')
            if newer(source_c, target_c) or newer(source_h, target_h):
                self.mkpath(os.path.dirname(target_c))
                self.copy_file(source_c, target_c)
                self.copy_file(source_h, target_h)
        else:
            if not os.path.isfile(target_c):
                raise DistutilsSetupError("f2py target_c file %r not found" %
                                          (target_c, ))
            if not os.path.isfile(target_h):
                raise DistutilsSetupError("f2py target_h file %r not found" %
                                          (target_h, ))

        for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']:
            filename = os.path.join(target_dir, ext_name + name_ext)
            if os.path.isfile(filename):
                log.info("  adding '%s' to sources." % (filename))
                f_sources.append(filename)

        return new_sources + f_sources
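
Outside of a build_src run, the same f2py machinery can be driven directly. A hedged sketch using numpy.f2py.compile with a hypothetical Fortran source file:

import numpy.f2py

# Hypothetical: compile add.f90 into an importable extension module "add"
# (roughly equivalent to running `f2py -c -m add add.f90`).
with open('add.f90') as fp:
    fortran_source = fp.read()
numpy.f2py.compile(fortran_source, modulename='add', extension='.f90')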
Code Example #24
File: setup.py  Project: theseusyang/AutoDL
def get_common_options(build_ext):
    cpp_flags = get_cpp_flags(build_ext)
    mpi_flags = get_mpi_flags()

    gpu_allreduce = os.environ.get('HOROVOD_GPU_ALLREDUCE')
    if gpu_allreduce and gpu_allreduce != 'MPI' and gpu_allreduce != 'NCCL':
        raise DistutilsError('HOROVOD_GPU_ALLREDUCE=%s is invalid, supported '
                             'values are "", "MPI", "NCCL".' % gpu_allreduce)

    gpu_allgather = os.environ.get('HOROVOD_GPU_ALLGATHER')
    if gpu_allgather and gpu_allgather != 'MPI' and gpu_allgather != 'NCCL':
        raise DistutilsError('HOROVOD_GPU_ALLGATHER=%s is invalid, supported '
                             'values are "", "MPI", "NCCL".' % gpu_allgather)

    gpu_broadcast = os.environ.get('HOROVOD_GPU_BROADCAST')
    if gpu_broadcast and gpu_broadcast != 'MPI' and gpu_broadcast != 'NCCL':
        raise DistutilsError('HOROVOD_GPU_BROADCAST=%s is invalid, supported '
                             'values are "", "MPI", "NCCL".' % gpu_broadcast)

    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext, cpp_flags)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []

    if gpu_allreduce == 'NCCL':
        have_nccl = True
        nccl_include_dirs, nccl_lib_dirs = get_nccl_dirs(
            build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags)
    else:
        have_nccl = False
        nccl_include_dirs = nccl_lib_dirs = []

    MACROS = []
    INCLUDES = []
    SOURCES = []
    COMPILE_FLAGS = cpp_flags + shlex.split(mpi_flags)
    LINK_FLAGS = shlex.split(mpi_flags)
    LIBRARY_DIRS = []
    LIBRARIES = []

    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        LIBRARY_DIRS += cuda_lib_dirs
        LIBRARIES += ['cudart']

    if have_nccl:
        MACROS += [('HAVE_NCCL', '1')]
        INCLUDES += nccl_include_dirs
        LINK_FLAGS += ['-Wl,--version-script=hide_nccl.lds']
        LIBRARY_DIRS += nccl_lib_dirs
        LIBRARIES += ['nccl_static']

    if gpu_allreduce:
        MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]

    if gpu_allgather:
        MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]

    if gpu_broadcast:
        MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]

    return dict(MACROS=MACROS,
                INCLUDES=INCLUDES,
                SOURCES=SOURCES,
                COMPILE_FLAGS=COMPILE_FLAGS,
                LINK_FLAGS=LINK_FLAGS,
                LIBRARY_DIRS=LIBRARY_DIRS,
                LIBRARIES=LIBRARIES)
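
The returned dict is shaped so that each key can be passed straight through to an Extension. A sketch of how a caller might consume it; the build_ext instance and module name are hypothetical:

from setuptools import Extension

options = get_common_options(build_ext)  # hypothetical build_ext instance
mpi_lib = Extension(
    'horovod.tensorflow.mpi_lib',  # hypothetical module name
    define_macros=options['MACROS'],
    include_dirs=options['INCLUDES'],
    sources=options['SOURCES'],
    extra_compile_args=options['COMPILE_FLAGS'],
    extra_link_args=options['LINK_FLAGS'],
    library_dirs=options['LIBRARY_DIRS'],
    libraries=options['LIBRARIES'],
)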
Code Example #25
def parse_requirement_arg(spec):
    try:
        return Requirement.parse(spec)
    except ValueError:
        raise DistutilsError(
            "Not a URL, existing file, or requirement spec: %r" % (spec, ))
Code Example #26
def get_common_options(build_ext):
    cpp_flags = get_cpp_flags(build_ext)
    link_flags = get_link_flags(build_ext)
    mpi_flags = get_mpi_flags()

    gpu_allreduce = os.environ.get('HOROVOD_GPU_ALLREDUCE')
    if gpu_allreduce and gpu_allreduce != 'MPI' and gpu_allreduce != 'NCCL' and \
       gpu_allreduce != 'DDL':
        raise DistutilsError('HOROVOD_GPU_ALLREDUCE=%s is invalid, supported '
                             'values are "", "MPI", "NCCL", "DDL".' %
                             gpu_allreduce)

    gpu_allgather = os.environ.get('HOROVOD_GPU_ALLGATHER')
    if gpu_allgather and gpu_allgather != 'MPI':
        raise DistutilsError('HOROVOD_GPU_ALLGATHER=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_allgather)

    gpu_broadcast = os.environ.get('HOROVOD_GPU_BROADCAST')
    if gpu_broadcast and gpu_broadcast != 'MPI':
        raise DistutilsError('HOROVOD_GPU_BROADCAST=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_broadcast)

    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext, cpp_flags)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []

    if gpu_allreduce == 'NCCL':
        have_nccl = True
        nccl_include_dirs, nccl_lib_dirs, nccl_libs = get_nccl_vals(
            build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags)
    else:
        have_nccl = False
        nccl_include_dirs = nccl_lib_dirs = nccl_libs = []

    if gpu_allreduce == 'DDL':
        have_ddl = True
        ddl_include_dirs, ddl_lib_dirs = get_ddl_dirs(build_ext,
                                                      cuda_include_dirs,
                                                      cuda_lib_dirs, cpp_flags)
    else:
        have_ddl = False
        ddl_include_dirs = ddl_lib_dirs = []

    if (gpu_allreduce == 'NCCL'
            and (gpu_allgather == 'MPI' or gpu_broadcast == 'MPI')
            and not os.environ.get('HOROVOD_ALLOW_MIXED_GPU_IMPL')):
        raise DistutilsError(
            'You should not mix NCCL and MPI GPU due to a possible deadlock.\n'
            'If you\'re sure you want to mix them, set the '
            'HOROVOD_ALLOW_MIXED_GPU_IMPL environment variable to \'1\'.')

    MACROS = [('EIGEN_MPL2_ONLY', 1)]
    INCLUDES = [
        'third_party/eigen', 'third_party/lbfgs/include',
        'third_party/boost/assert/include', 'third_party/boost/config/include',
        'third_party/boost/core/include', 'third_party/boost/detail/include',
        'third_party/boost/iterator/include',
        'third_party/boost/lockfree/include', 'third_party/boost/mpl/include',
        'third_party/boost/parameter/include',
        'third_party/boost/predef/include',
        'third_party/boost/preprocessor/include',
        'third_party/boost/static_assert/include',
        'third_party/boost/type_traits/include',
        'third_party/boost/utility/include', 'third_party/flatbuffers/include'
    ]
    SOURCES = [
        'horovod/common/common.cc', 'horovod/common/fusion_buffer_manager.cc',
        'horovod/common/half.cc', 'horovod/common/message.cc',
        'horovod/common/mpi_context.cc', 'horovod/common/operations.cc',
        'horovod/common/parameter_manager.cc',
        'horovod/common/response_cache.cc', 'horovod/common/timeline.cc',
        'horovod/common/ops/collective_operations.cc',
        'horovod/common/ops/mpi_operations.cc',
        'horovod/common/ops/operation_manager.cc',
        'horovod/common/optim/bayesian_optimization.cc',
        'horovod/common/optim/gaussian_process.cc', 'horovod/common/logging.cc'
    ]
    COMPILE_FLAGS = cpp_flags + shlex.split(mpi_flags)
    LINK_FLAGS = link_flags + shlex.split(mpi_flags)
    LIBRARY_DIRS = []
    LIBRARIES = []

    if have_mlsl:
        MACROS += [('HAVE_MLSL', '1')]
        INCLUDES += [mlsl_root + '/intel64/include/']
        SOURCES += ['horovod/common/ops/mlsl_operations.cc']
        LIBRARY_DIRS += [mlsl_root + '/intel64/lib/']
        LINK_FLAGS += ['-lmlsl']

    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        SOURCES += [
            'horovod/common/ops/cuda_operations.cc',
            'horovod/common/ops/mpi_cuda_operations.cc'
        ]
        LIBRARY_DIRS += cuda_lib_dirs
        LIBRARIES += ['cudart']

    if have_nccl:
        MACROS += [('HAVE_NCCL', '1')]
        INCLUDES += nccl_include_dirs
        SOURCES += ['horovod/common/ops/nccl_operations.cc']
        LIBRARY_DIRS += nccl_lib_dirs
        LIBRARIES += nccl_libs

    if have_ddl:
        MACROS += [('HAVE_DDL', '1')]
        INCLUDES += ddl_include_dirs
        SOURCES += ['horovod/common/ops/ddl_operations.cc']
        LIBRARY_DIRS += ddl_lib_dirs
        LIBRARIES += ['ddl', 'ddl_pack']

    if gpu_allreduce:
        MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]

    if gpu_allgather:
        MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]

    if gpu_broadcast:
        MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]

    return dict(MACROS=MACROS,
                INCLUDES=INCLUDES,
                SOURCES=SOURCES,
                COMPILE_FLAGS=COMPILE_FLAGS,
                LINK_FLAGS=LINK_FLAGS,
                LIBRARY_DIRS=LIBRARY_DIRS,
                LIBRARIES=LIBRARIES)
Code Example #27
long_description = """\
This package creates a quaternion type in python, and further enables numpy to create and manipulate arrays of
quaternions.  The usual algebraic operations (addition and multiplication) are available, along with numerous
properties like norm and various types of distance measures between two quaternions.  There are also
additional functions like "squad" and "slerp" interpolation, and conversions to and from axis-angle, matrix,
and Euler-angle representations of rotations.  The core of the code is written in C for speed.
"""

if __name__ == "__main__":
    import numpy
    from setuptools import setup, Extension
    # from distutils.core import setup, Extension
    from distutils.errors import DistutilsError
    if numpy.__dict__.get('quaternion') is not None:
        raise DistutilsError('The target NumPy already has a quaternion type')
    extension = Extension(
        name='quaternion.numpy_quaternion',  # This is the name of the object file that will be compiled
        sources=['quaternion.c', 'numpy_quaternion.c'],
        extra_compile_args=['/O2' if on_windows else '-O3'],
        depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'],
        include_dirs=[numpy.get_include()])
    setup(
        name='numpy-quaternion',  # Uploaded to pypi under this name
        packages=['quaternion'],  # This is the actual package name
        package_dir={'quaternion': ''},
        ext_modules=[extension],
        version=version,
        install_requires=[
            'numpy>=1.13',
Code Example #28
            return local_open(url)
        try:
            return open_with_auth(url)
        except (ValueError, httplib.InvalidURL) as v:
            msg = ' '.join([str(arg) for arg in v.args])
            if warning:
                self.warn(warning, msg)
            else:
                raise DistutilsError('%s %s' % (url, msg))
        except urllib2.HTTPError as v:
            return v
        except urllib2.URLError as v:
            if warning:
                self.warn(warning, v.reason)
            else:
                raise DistutilsError("Download error for %s: %s" %
                                     (url, v.reason))
        except httplib.BadStatusLine as v:
            if warning:
                self.warn(warning, v.line)
            else:
                raise DistutilsError('%s returned a bad status line. '
                                     'The server might be down, %s' % \
                                             (url, v.line))
        except httplib.HTTPException as v:
            if warning:
                self.warn(warning, v)
            else:
                raise DistutilsError("Download error for %s: %s" % (url, v))

    def _download_url(self, scheme, url, tmpdir):
        # Determine download filename
Code Example #29
    def build_extension(self, ext):
        sources = ext.sources
        if sources is None or not is_sequence(sources):
            raise DistutilsSetupError(
                ("in 'ext_modules' option (extension '%s'), " +
                 "'sources' must be present and must be " +
                 "a list of source filenames") % ext.name)
        sources = list(sources)

        if not sources:
            return

        fullname = self.get_ext_fullname(ext.name)
        if self.inplace:
            modpath = fullname.split('.')
            package = '.'.join(modpath[0:-1])
            base = modpath[-1]
            build_py = self.get_finalized_command('build_py')
            package_dir = build_py.get_package_dir(package)
            ext_filename = os.path.join(package_dir,
                                        self.get_ext_filename(base))
        else:
            ext_filename = os.path.join(self.build_lib,
                                        self.get_ext_filename(fullname))
        depends = sources + ext.depends

        if not (self.force or newer_group(depends, ext_filename, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        extra_args = ext.extra_compile_args or []
        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef, ))

        c_sources, cxx_sources, f_sources, fmodule_sources = \
            filter_sources(ext.sources)

        if self.compiler.compiler_type == 'msvc':
            if cxx_sources:
                # Needed to compile kiva.agg._agg extension.
                extra_args.append('/Zm1000')
            # this hack works around the msvc compiler attributes
            # problem, msvc uses its own convention :(
            c_sources += cxx_sources
            cxx_sources = []

        # Set Fortran/C++ compilers for compilation and linking.
        if ext.language == 'f90':
            fcompiler = self._f90_compiler
        elif ext.language == 'f77':
            fcompiler = self._f77_compiler
        else:  # in case ext.language is c++, for instance
            fcompiler = self._f90_compiler or self._f77_compiler
        if fcompiler is not None:
            fcompiler.extra_f77_compile_args = (
                ext.extra_f77_compile_args or []) if hasattr(
                    ext, 'extra_f77_compile_args') else []
            fcompiler.extra_f90_compile_args = (
                ext.extra_f90_compile_args or []) if hasattr(
                    ext, 'extra_f90_compile_args') else []
        cxx_compiler = self._cxx_compiler

        # check for the availability of required compilers
        if cxx_sources and cxx_compiler is None:
            raise DistutilsError("extension %r has C++ sources"
                                 "but no C++ compiler found" % (ext.name))
        if (f_sources or fmodule_sources) and fcompiler is None:
            raise DistutilsError("extension %r has Fortran sources "
                                 "but no Fortran compiler found" % (ext.name))
        if ext.language in ['f77', 'f90'] and fcompiler is None:
            self.warn("extension %r has Fortran libraries "
                      "but no Fortran linker found, using default linker" %
                      (ext.name))
        if ext.language == 'c++' and cxx_compiler is None:
            self.warn("extension %r has C++ libraries "
                      "but no C++ linker found, using default linker" %
                      (ext.name))

        kws = {'depends': ext.depends}
        output_dir = self.build_temp

        include_dirs = ext.include_dirs + get_numpy_include_dirs()

        dispatch_objects = []
        if not self.disable_optimization:
            dispatch_sources = [
                c_sources.pop(c_sources.index(src)) for src in c_sources[:]
                if src.endswith(".dispatch.c")
            ]
            if dispatch_sources:
                if not self.inplace:
                    build_src = self.get_finalized_command(
                        "build_src").build_src
                else:
                    build_src = None
                dispatch_objects = self.compiler_opt.try_dispatch(
                    dispatch_sources,
                    output_dir=output_dir,
                    src_dir=build_src,
                    macros=macros,
                    include_dirs=include_dirs,
                    debug=self.debug,
                    extra_postargs=extra_args,
                    **kws)
            extra_args_baseopt = extra_args + self.compiler_opt.cpu_baseline_flags(
            )
        else:
            extra_args_baseopt = extra_args
            macros.append(("NPY_DISABLE_OPTIMIZATION", 1))

        c_objects = []
        if c_sources:
            log.info("compiling C sources")
            c_objects = self.compiler.compile(
                c_sources,
                output_dir=output_dir,
                macros=macros,
                include_dirs=include_dirs,
                debug=self.debug,
                extra_postargs=extra_args_baseopt,
                **kws)
        c_objects.extend(dispatch_objects)

        if cxx_sources:
            log.info("compiling C++ sources")
            c_objects += cxx_compiler.compile(cxx_sources,
                                              output_dir=output_dir,
                                              macros=macros,
                                              include_dirs=include_dirs,
                                              debug=self.debug,
                                              extra_postargs=extra_args,
                                              **kws)

        extra_postargs = []
        f_objects = []
        if fmodule_sources:
            log.info("compiling Fortran 90 module sources")
            module_dirs = ext.module_dirs[:]
            module_build_dir = os.path.join(
                self.build_temp,
                os.path.dirname(self.get_ext_filename(fullname)))

            self.mkpath(module_build_dir)
            if fcompiler.module_dir_switch is None:
                existing_modules = glob('*.mod')
            extra_postargs += fcompiler.module_options(module_dirs,
                                                       module_build_dir)
            f_objects += fcompiler.compile(fmodule_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs,
                                           depends=ext.depends)

            if fcompiler.module_dir_switch is None:
                for f in glob('*.mod'):
                    if f in existing_modules:
                        continue
                    t = os.path.join(module_build_dir, f)
                    if os.path.abspath(f) == os.path.abspath(t):
                        continue
                    if os.path.isfile(t):
                        os.remove(t)
                    try:
                        self.move_file(f, module_build_dir)
                    except DistutilsFileError:
                        log.warn('failed to move %r to %r' %
                                 (f, module_build_dir))
        if f_sources:
            log.info("compiling Fortran sources")
            f_objects += fcompiler.compile(f_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs,
                                           depends=ext.depends)

        if f_objects and not fcompiler.can_ccompiler_link(self.compiler):
            unlinkable_fobjects = f_objects
            objects = c_objects
        else:
            unlinkable_fobjects = []
            objects = c_objects + f_objects

        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []
        libraries = self.get_libraries(ext)[:]
        library_dirs = ext.library_dirs[:]

        linker = self.compiler.link_shared_object
        # Always use system linker when using MSVC compiler.
        if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'):
            # expand libraries with fcompiler libraries as we are
            # not using fcompiler linker
            self._libs_with_msvc_and_fortran(fcompiler, libraries,
                                             library_dirs)

        elif ext.language in ['f77', 'f90'] and fcompiler is not None:
            linker = fcompiler.link_shared_object
        if ext.language == 'c++' and cxx_compiler is not None:
            linker = cxx_compiler.link_shared_object

        if fcompiler is not None:
            objects, libraries = self._process_unlinkable_fobjects(
                objects, libraries, fcompiler, library_dirs,
                unlinkable_fobjects)

        linker(objects,
               ext_filename,
               libraries=libraries,
               library_dirs=library_dirs,
               runtime_library_dirs=ext.runtime_library_dirs,
               extra_postargs=extra_args,
               export_symbols=self.get_export_symbols(ext),
               debug=self.debug,
               build_temp=self.build_temp,
               target_lang=ext.language)
Code Example #30
File: setup.py  Project: dominhduy/photoplace
 def create_binaries(self, py_files, extensions, dlls):
     if self.gtkdir is None:
         gtkdir = None
         # Fetches the gtk2 path from the registry
         import _winreg
         import msvcrt
         try:
             k = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
                                 "Software\\GTK\\2.0")
         except EnvironmentError:
             raise DistutilsError(
                 'Could not find gtk+ 2.2 Runtime Environment to copy libraries and data files.'
             )
         else:
             dir = _winreg.QueryValueEx(k, "Path")
             os.environ['PATH'] += ";%s/lib;%s/bin" % (dir[0], dir[0])
             gtkdir = dir[0]
     else:
         gtkdir = self.gtkdir
     _py2exe.create_binaries(self, py_files, extensions, dlls)
     if self.gtkdata:
         for f in find_files(gtkdir, 'lib',
                             ['*.dll.a', '*.def', '*.lib'],
                             ['pkgconfig', 'glib-2.0']):
             dest_dir = os.path.dirname(os.path.join(self.exe_dir, f))
             if not os.path.exists(dest_dir):
                 os.makedirs(dest_dir)
             self.copy_file(os.path.join(gtkdir, f),
                            os.path.join(self.exe_dir, f),
                            preserve_mode=0)
         for f in find_files(gtkdir, 'etc', ['*.*~']):
             dest_dir = os.path.dirname(os.path.join(self.exe_dir, f))
             if not os.path.exists(dest_dir):
                 os.makedirs(dest_dir)
             self.copy_file(os.path.join(gtkdir, f),
                            os.path.join(self.exe_dir, f),
                            preserve_mode=0)
         # GTK locales
         for lang in self.languages:
             glob_dir = os.path.join(gtkdir, 'share\\locale', lang,
                                     'LC_MESSAGES\\*.mo')
             for f in glob.glob(glob_dir):
                 for llang in glob.glob(
                         os.path.join(gtkdir, 'share\\locale', lang)):
                     country = os.path.basename(llang)
                     dest_dir = os.path.join(self.exe_dir,
                                             'share\\locale', country,
                                             'LC_MESSAGES')
                     if not os.path.exists(dest_dir):
                         os.makedirs(dest_dir)
                     self.copy_file(f, dest_dir)
         self.copy_file(os.path.join(gtkdir,
                                     'share\\locale\\locale.alias'),
                        os.path.join(self.exe_dir, 'share\\locale'),
                        preserve_mode=0)
         # GTK Themes
         for f in find_files(gtkdir, 'share\\themes', ['*.*~']):
             dest_dir = os.path.dirname(os.path.join(self.exe_dir, f))
             if not os.path.exists(dest_dir):
                 os.makedirs(dest_dir)
             self.copy_file(os.path.join(gtkdir, f),
                            os.path.join(self.exe_dir, f),
                            preserve_mode=0)
      if self.gtktheme is not None:
         print("*** Enabling additional themes for gtk+ ***")
         for f in find_files(self.gtkthemes, 'share\\themes', ['*.*~']):
             dest_dir = os.path.dirname(os.path.join(self.exe_dir, f))
             if not os.path.exists(dest_dir):
                 os.makedirs(dest_dir)
             self.copy_file(os.path.join(self.gtkthemes, f),
                            os.path.join(self.exe_dir, f),
                            preserve_mode=0)
         for f in find_files(self.gtkthemes, 'lib\\gtk-2.0', ['*.*~']):
             dest_dir = os.path.dirname(os.path.join(self.exe_dir, f))
             if not os.path.exists(dest_dir):
                 os.makedirs(dest_dir)
             self.copy_file(os.path.join(self.gtkthemes, f),
                            os.path.join(self.exe_dir, f),
                            preserve_mode=0)
         gtktheme_dir = os.path.join(self.exe_dir, 'etc', 'gtk-2.0')
         if not os.path.exists(gtktheme_dir):
             os.makedirs(gtktheme_dir)
          with open(os.path.join(gtktheme_dir, 'gtkrc'), 'w') as fp:
              fp.write("# Generated from setup.py\n")
              fp.write('gtk-theme-name = "%s"\n' % self.gtktheme)
     # addons
      if self.addons is not None:
         print("*** Copying core addons ***")
         build = self.get_finalized_command('build')
         orig_addon_dir = os.path.join(build.build_base, self.addons)
         for f in find_files(orig_addon_dir, ''):
             dest_dir = os.path.dirname(
                 os.path.join(self.exe_dir, self.addondir, f))
             if not os.path.exists(dest_dir):
                 os.makedirs(dest_dir)
             self.copy_file(os.path.join(orig_addon_dir, f),
                            dest_dir,
                            preserve_mode=0)