Example #1
 def run(self):
     install_custom_sqlite3()
     build_req_wheels()
     print("Running wheel...")
     bdist_wheel_.run(self)
     print("Done running wheel")
     copy_custom_sqlite3()
Example #2
 def run(self):
     if is_manylinux:
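         # Copy the pybind11 extension under a manylinux-specific name and use
         # patchelf to strip CUDA libraries from its NEEDED entries; the removed
         # entries are collected so they can later be loaded via an LD_PRELOAD rewrite.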
         source = 'onnxruntime/capi/onnxruntime_pybind11_state.so'
         dest = 'onnxruntime/capi/onnxruntime_pybind11_state_manylinux1.so'
         logger.info('copying %s -> %s', source, dest)
         copyfile(source, dest)
         result = subprocess.run(['patchelf', '--print-needed', dest], check=True, stdout=subprocess.PIPE, universal_newlines=True)
         cuda_dependencies = ['libcublas.so', 'libcudnn.so', 'libcudart.so', 'libcurand.so', 'libcufft.so', 'libnvToolsExt.so']
         to_preload = []
         args = ['patchelf', '--debug']
         for line in result.stdout.split('\n'):
             for dependency in cuda_dependencies:
                 if dependency in line:
                     to_preload.append(line)
                     args.extend(['--remove-needed', line])
         args.append(dest)
         if len(to_preload) > 0:
             subprocess.run(args, check=True, stdout=subprocess.PIPE)
         self._rewrite_ld_preload(to_preload)
     _bdist_wheel.run(self)
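     # After the standard wheel build, repair the wheel with auditwheel so it
     # carries a manylinux1 tag, then delete the original linux wheel.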
     if is_manylinux:
         file = glob(path.join(self.dist_dir, '*linux*.whl'))[0]
         logger.info('repairing %s for manylinux1', file)
         try:
             subprocess.run(['auditwheel', 'repair', '-w', self.dist_dir, file], check=True, stdout=subprocess.PIPE)
         finally:
             logger.info('removing %s', file)
             remove(file)
Example #3
 def run(self):
     for dep in self.distribution.install_requires:
         install_cmd = "pip install {} --disable-pip-version-check --no-cache-dir".format(dep)
         if self.pip_args is not None:
             install_cmd += ' ' + self.pip_args
         os.system(install_cmd)
     bdist_wheel.run(self)
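As an aside, os.system discards pip's exit status, so a failed dependency install goes unnoticed. Below is a minimal sketch of the same pre-install step using subprocess instead; the helper name and its arguments are illustrative and not part of the project above:

 import subprocess
 import sys

 def install_requirements(requirements, pip_args=None):
     # Install each requirement with the running interpreter's pip and raise
     # CalledProcessError if any installation fails (illustrative helper).
     for dep in requirements:
         cmd = [sys.executable, "-m", "pip", "install", dep,
                "--disable-pip-version-check", "--no-cache-dir"]
         if pip_args:
             cmd.extend(pip_args)
         subprocess.check_call(cmd)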
Example #4
 def run(self):
     if not is_configured:
         print('running config')
         configure_bdist(self)
         print_config()
     self.run_command("build")
     _bdist_wheel.run(self)
Example #5
 def run(self):
     # Ensure that there is a basic library build for bdist_egg to pull from.
     self.run_command("build")
     
     _cleanup_symlinks(self)
     
     # Run the default bdist_wheel command
     orig_bdist_wheel.run(self)
Example #6
    def run(self):
        global update_link, app_version
        if self.update_link is not None:
            update_link = self.update_link

        if self.version is not None:
            app_version = self.version
        bdist_wheel.run(self)
Example #7
 def run(self):
     build_ext = self.reinitialize_command('build_ext',
                                           reinit_subcommands=True)
     path = self.bdist_dir
     path = path.replace("build/", "")
     path = path.replace("build\\", "")
     setattr(build_ext, 'install_dir', path)
     bdist_wheel.run(self)
Example #8
        def run(self):
            # Ensure that there is a basic library build for bdist_egg to pull from.
            self.run_command("build")

            _cleanup_symlinks(self)

            # Run the default bdist_wheel command
            orig_bdist_wheel.run(self)
Example #9
 def run(self):
     # Ensure the binding file exist when running wheel build
     from llvmlite.utils import get_library_files
     build_library_files(self.dry_run)
     self.distribution.package_data.update({
         "llvmlite.binding": get_library_files(),
     })
     # Run wheel build command
     bdist_wheel.run(self)
Example #10
 def run(self):
     if not (download_and_install_wheel()):
         custom_compile(THIRD_PARTY, INTERNAL)
         build_req_wheels()
         open(BUILT_LOCAL, 'w+').close()
     print("Running wheel...")
     bdist_wheel_.run(self)
     print("Done running wheel")
     copy_custom_compile()
Example #11
 def run(self):
     if not(download_and_install_wheel()):
         install_custom_sqlite3()
         build_req_wheels()
         open(BUILT_LOCAL, 'w+').close()
     print("Running wheel...")
     bdist_wheel_.run(self)
     print("Done running wheel")
     copy_custom_sqlite3()
Example #12
 def run(self):
     global build_dir
     log.debug('_bdist_wheel run')
     self.distribution.ext_modules = [gip_module] + swig_modules
     self.run_command('build_ext')
     bdist_wheel.run(self)
     update_lib_path_mac(
         os.path.join(build_dir, gip_module._file_name),
         modpath=build_dir
     )
Example #13
 def run(self):
     # Ensure the binding file exist when running wheel build
     from llvmlite.utils import get_library_files
     # Turn on -fPIC for wheel building on Linux
     pic = sys.platform.startswith('linux')
     build_library_files(self.dry_run, pic=pic)
     self.distribution.package_data.update({
         "llvmlite.binding": get_library_files(),
     })
     # Run wheel build command
     bdist_wheel.run(self)
Example #14
 def run(self):
     if platform.system() == "Darwin":
         os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.10'
     _bdist_wheel.run(self)
     impl_tag, abi_tag, plat_tag = self.get_tag()
     archive_basename = "{}-{}-{}-{}".format(self.wheel_dist_name, impl_tag,
                                             abi_tag, plat_tag)
     wheel_path = os.path.join(self.dist_dir, archive_basename + '.whl')
     if platform.system() == "Darwin":
         from delocate.delocating import delocate_wheel
         delocate_wheel(wheel_path)
Example #15
 def run(self):
     print("creating VERSION file")
     if os.path.exists('VERSION'):
         os.remove('VERSION')
     version = get_version()
     with open('VERSION', 'w') as version_file:
         version_file.write(version)
     bdist_wheel.run(self)
     print("removing VERSION file")
     if os.path.exists('VERSION'):
         os.remove('VERSION')
Example #16
    def run(self):
        """Build the wheel packages."""
        install = self.reinitialize_command("install", reinit_subcommands=True)
        install.with_mysql_capi = self.with_mysql_capi
        install.with_protobuf_include_dir = self.with_protobuf_include_dir
        install.with_protobuf_lib_dir = self.with_protobuf_lib_dir
        install.with_protoc = self.with_protoc
        install.extra_compile_args = self.extra_compile_args
        install.extra_link_args = self.extra_link_args
        install.static = self.static
        install.is_wheel = True
        self.run_command("install")
        self.skip_build = True
        if self.distribution.data_files:
            # Copy data_files before bdist_wheel.run()
            for directory, files in self.distribution.data_files:
                dst = os.path.join(install.build_lib, directory)
                mkpath(dst)
                for filename in files:
                    src = os.path.join(os.getcwd(), filename)
                    log.info("copying {0} -> {1}".format(src, dst))
                    shutil.copy(src, dst)
            # Don't include data_files in wheel
            self.distribution.data_files = None

        # Create wheel
        bdist_wheel.run(self)

        # Remove build folder
        if not self.keep_temp:
            build = self.get_finalized_command("build")
            remove_tree(build.build_base, dry_run=self.dry_run)
            mysql_vendor = os.path.join(os.getcwd(), "mysql-vendor")
            if platform.system() == "Darwin" and os.path.exists(mysql_vendor):
                remove_tree(mysql_vendor)
            elif os.name == "nt":
                if ARCH_64BIT:
                    libraries = [
                        "libmysql.dll", "libssl-1_1-x64.dll",
                        "libcrypto-1_1-x64.dll"
                    ]
                else:
                    libraries = [
                        "libmysql.dll", "libssl-1_1.dll", "libcrypto-1_1.dll"
                    ]
                for filename in libraries:
                    dll_file = os.path.join(os.getcwd(), filename)
                    if os.path.exists(dll_file):
                        os.unlink(dll_file)
Example #17
        def run(self):
            # Ensure the binding file exist when running wheel build
            from llvmlite.utils import get_library_files
            build_library_files(self.dry_run)
            for fn in get_library_files():
                path = os.path.join('llvmlite', 'binding', fn)
                if not os.path.isfile(path):
                    raise RuntimeError("missing {}".format(path))

            self.distribution.package_data.update({
                "llvmlite.binding":
                get_library_files(),
            })
            # Run wheel build command
            bdist_wheel.run(self)
Example #18
 def run(self):
     if learning_engine_enabled():
         graphlearn_shared_lib = "libgraphlearn_shared.so"
         if not os.path.isfile(
                 os.path.join(
                     repo_root,
                     "..",
                     "learning_engine",
                     "graph-learn",
                     "built",
                     "lib",
                     graphlearn_shared_lib,
                 )):
             raise ValueError(
                 "You must build the graphlearn library at first")
     self.run_command("build_proto")
     bdist_wheel.run(self)
Example #19
        def run(self):
            # The options are stored in global variables:
            opt = cl_opt()
            #   mpi  : build with MPI support (boolean).
            opt['mpi'] = self.mpi is not None
            #   gpu  : compile for AMD/NVIDIA GPUs and choose compiler (string).
            opt['gpu'] = "none" if self.gpu is None else self.gpu
            #   vec  : generate SIMD vectorized kernels for CPU micro-architecture (boolean).
            opt['vec'] = self.vec is not None
            #   arch : target CPU micro-architecture (string).
            opt['arch'] = "native" if self.arch is None else self.arch
            #   neuroml : compile with neuroml support for morphologies.
            opt['neuroml'] = self.neuroml is not None
            #   bundled : use bundled/git-submoduled 3rd party libraries.
            #             By default use bundled libs.
            opt['bundled'] = self.sysdeps is None

            bdist_wheel.run(self)
Example #20
    def run(self):
        self.universal = True
        bdist_wheel.run(self)
        
        import imageio
        
        # Get base zipfile
        import zipfile
        distdir = op.join(THIS_DIR, 'dist')
        basename = 'imageio-%s-py2.py3-none-any.whl' % __version__
        zipfilename = op.join(distdir, basename)
        assert op.isfile(zipfilename)
        
        # Create/clean build dir
        build_dir = op.join(distdir, 'temp')
        if op.isdir(build_dir):
            shutil.rmtree(build_dir)
        os.mkdir(build_dir)
        
        # Extract, get resource dir
        with zipfile.ZipFile(zipfilename, 'r') as tf:
            tf.extractall(build_dir)
        resource_dir = op.join(build_dir, 'imageio', 'resources')
        assert os.path.isdir(resource_dir), build_dir

        # Prepare the libs resource directory with cross-platform
        # resources, so we can copy these for each platform
        _set_crossplatform_resources(imageio.core.resource_dirs()[0])
        
        # Create archives
        dist_files = self.distribution.dist_files
        while dist_files:
            dist_files.pop()
        pyver = 'cp26.cp27.cp33.cp34.cp35'
        for plat in ['win64', 'osx64']:
            fname = self._create_wheels_for_platform(resource_dir,
                                                     plat, pyver)
            dist_files.append(('bdist_wheel', 'any', 'dist/'+fname))

        # Clean up
        shutil.rmtree(build_dir)
        os.remove('dist/' + basename)
Example #21
 def run(self):
     if platform.system() == "Darwin":
         os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.15'
     _bdist_wheel.run(self)
     impl_tag, abi_tag, plat_tag = self.get_tag()
     archive_basename = "{}-{}-{}-{}".format(self.wheel_dist_name, impl_tag,
                                             abi_tag, plat_tag)
     wheel_path = os.path.join(self.dist_dir, archive_basename + '.whl')
     if platform.system() == "Darwin":
         from delocate.delocating import delocate_wheel
         delocate_wheel(wheel_path)
     elif platform.system() == "Linux":
         # This only works for manylinux
         if 'AUDITWHEEL_PLAT' in os.environ:
             from auditwheel.repair import repair_wheel
             repair_wheel(wheel_path,
                          abi=os.environ['AUDITWHEEL_PLAT'],
                          lib_sdir=".libs",
                          out_dir=self.dist_dir,
                          update_tags=True)
Example #22
    def run(self):
        self.universal = True
        bdist_wheel.run(self)

        import imageio

        # Get base zipfile
        import zipfile
        distdir = op.join(THIS_DIR, 'dist')
        basename = 'imageio-%s-py2.py3-none-any.whl' % __version__
        zipfilename = op.join(distdir, basename)
        assert op.isfile(zipfilename)

        # Create/clean build dir
        build_dir = op.join(distdir, 'temp')
        if op.isdir(build_dir):
            shutil.rmtree(build_dir)
        os.mkdir(build_dir)

        # Extract, get resource dir
        with zipfile.ZipFile(zipfilename, 'r') as tf:
            tf.extractall(build_dir)
        resource_dir = op.join(build_dir, 'imageio', 'resources')
        assert os.path.isdir(resource_dir), build_dir

        # Prepare the libs resource directory with cross-platform
        # resources, so we can copy these for each platform
        _set_crossplatform_resources(imageio.core.resource_dirs()[0])

        # Create archives
        dist_files = self.distribution.dist_files
        while dist_files:
            dist_files.pop()
        pyver = 'cp26.cp27.cp33.cp34.cp35'
        for plat in ['win64', 'osx64']:
            fname = self._create_wheels_for_platform(resource_dir, plat, pyver)
            dist_files.append(('bdist_wheel', 'any', 'dist/' + fname))

        # Clean up
        shutil.rmtree(build_dir)
        os.remove('dist/' + basename)
Example #23
 def run(self):
     """Do nothing so the command intentionally fails."""
     _build_paneljs()
     bdist_wheel.run(self)
Example #24
 def run(self):
     bdist_wheel.run(self)
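For orientation, here is a minimal sketch of how a subclass like the ones above is typically registered so that the bdist_wheel command picks it up through cmdclass. The class name is an assumption for illustration, and the wheel.bdist_wheel import reflects the older wheel API these examples use:

 from setuptools import setup
 from wheel.bdist_wheel import bdist_wheel


 class CustomBdistWheel(bdist_wheel):
     def run(self):
         # Hook point: run extra build steps here, then delegate to the
         # stock bdist_wheel implementation.
         bdist_wheel.run(self)


 setup(
     name="example-package",   # assumed project name
     version="0.1.0",
     cmdclass={"bdist_wheel": CustomBdistWheel},
 )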
Example #25
 def run(self):
     self.distribution.ext_modules = swig_modules
     self.run_command('build_ext')
     bdist_wheel.run(self)
Example #26
 def run(self):
     if not self.distribution.install_requires:
         self.distribution.install_requires = []
     self.distribution.install_requires.append(
         "{}>=2.0.0".format(self.azure_namespace_package))
     bdist_wheel.run(self)
Example #27
 def run(self):
     download_library(self)
     _bdist_wheel.run(self)
Example #28
 def run(self):
     Helper.propagate_options(self, "bdist_wheel", "build_ext")
     _bdist_wheel.run(self)
Example #29
 def run(self):
     _bdist_wheel.run(self)
     assert False, "bdist install is not supported, use source install"
Example #30
 def run(self):
     self.run_command('build_ext')
     return _bdist_wheel.run(self)
Example #31
 def run(self):
     self.run_command('build_frontend')
     _bdist_wheel.run(self)
Example #32
 def run(self):
     download_library(self)
     _bdist_wheel.run(self)
Example #33
 def run(self):
     with inject_version_info():
         # Super class is an old-style class, so we use old-style
         # "super" call.
         bdist_wheel.run(self)
Example #34
File: setup.py Project: Eh2406/orca
 def run(self):
     build_js_bundle()
     bdist_wheel.run(self)
Example #35
 def run(self):
     generate_proto("./pfpdb/PFPSimDebugger.proto")
     bdist_wheel.run(self)
Example #36
        def run(self):
            if is_manylinux:
                source = "onnxruntime/capi/onnxruntime_pybind11_state.so"
                dest = "onnxruntime/capi/onnxruntime_pybind11_state_manylinux1.so"
                logger.info("copying %s -> %s", source, dest)
                copyfile(source, dest)
                result = subprocess.run(
                    ["patchelf", "--print-needed", dest], check=True, stdout=subprocess.PIPE, universal_newlines=True
                )
                dependencies = [
                    "librccl.so",
                    "libamdhip64.so",
                    "librocblas.so",
                    "libMIOpen.so",
                    "libhsa-runtime64.so",
                    "libhsakmt.so",
                ]
                to_preload = []
                to_preload_cuda = []
                to_preload_tensorrt = []
                cuda_dependencies = []
                args = ["patchelf", "--debug"]
                for line in result.stdout.split("\n"):
                    for dependency in dependencies:
                        if dependency in line:
                            to_preload.append(line)
                            args.extend(["--remove-needed", line])
                args.append(dest)
                if len(args) > 3:
                    subprocess.run(args, check=True, stdout=subprocess.PIPE)

                dest = "onnxruntime/capi/libonnxruntime_providers_" + ("rocm.so" if is_rocm else "cuda.so")
                if path.isfile(dest):
                    result = subprocess.run(
                        ["patchelf", "--print-needed", dest],
                        check=True,
                        stdout=subprocess.PIPE,
                        universal_newlines=True,
                    )
                    cuda_dependencies = [
                        "libcublas.so",
                        "libcublasLt.so",
                        "libcudnn.so",
                        "libcudart.so",
                        "libcurand.so",
                        "libcufft.so",
                        "libnvToolsExt.so",
                        "libcupti.so",
                    ]
                    rocm_dependencies = [
                        "librccl.so",
                        "libamdhip64.so",
                        "librocblas.so",
                        "libMIOpen.so",
                        "libhsa-runtime64.so",
                        "libhsakmt.so",
                    ]
                    args = ["patchelf", "--debug"]
                    for line in result.stdout.split("\n"):
                        for dependency in cuda_dependencies + rocm_dependencies:
                            if dependency in line:
                                if dependency not in to_preload:
                                    to_preload_cuda.append(line)
                                args.extend(["--remove-needed", line])
                    args.append(dest)
                    if len(args) > 3:
                        subprocess.run(args, check=True, stdout=subprocess.PIPE)

                dest = "onnxruntime/capi/libonnxruntime_providers_" + ("migraphx.so" if is_rocm else "tensorrt.so")
                if path.isfile(dest):
                    result = subprocess.run(
                        ["patchelf", "--print-needed", dest],
                        check=True,
                        stdout=subprocess.PIPE,
                        universal_newlines=True,
                    )
                    tensorrt_dependencies = ["libnvinfer.so", "libnvinfer_plugin.so", "libnvonnxparser.so"]
                    args = ["patchelf", "--debug"]
                    for line in result.stdout.split("\n"):
                        for dependency in cuda_dependencies + tensorrt_dependencies:
                            if dependency in line:
                                if dependency not in (to_preload + to_preload_cuda):
                                    to_preload_tensorrt.append(line)
                                args.extend(["--remove-needed", line])
                    args.append(dest)
                    if len(args) > 3:
                        subprocess.run(args, check=True, stdout=subprocess.PIPE)

                dest = "onnxruntime/capi/libonnxruntime_providers_openvino.so"
                if path.isfile(dest):
                    subprocess.run(
                        ["patchelf", "--set-rpath", "$ORIGIN", dest, "--force-rpath"],
                        check=True,
                        stdout=subprocess.PIPE,
                        universal_newlines=True,
                    )

                self._rewrite_ld_preload(to_preload)
                self._rewrite_ld_preload_cuda(to_preload_cuda)
                self._rewrite_ld_preload_tensorrt(to_preload_tensorrt)
            _bdist_wheel.run(self)
            if is_manylinux and not disable_auditwheel_repair and not is_openvino:
                assert self.dist_dir is not None
                file = glob(path.join(self.dist_dir, "*linux*.whl"))[0]
                logger.info("repairing %s for manylinux1", file)
                try:
                    subprocess.run(
                        ["auditwheel", "repair", "-w", self.dist_dir, file], check=True, stdout=subprocess.PIPE
                    )
                finally:
                    logger.info("removing %s", file)
                    remove(file)
Example #37
 def run(self):
     self.run_command('build_frontend')
     _bdist_wheel.run(self)
Example #38
 def run(self):
     self.run_command('build_ext')
     return _bdist_wheel.run(self)
Example #39
 def run(self):
     self.run_command('build')
     # patch_cython_binary()
     native_bdist_wheel.run(self)
Example #40
 def do_run(self):
     bdist_wheel.run(self)
Example #41
 def run(self):
     self.distribution.ext_modules = [gip_module] + swig_modules
     self.run_command("build_ext")
     bdist_wheel.run(self)
Example #42
 def run(self):
     call(["pip install -r ./requirements.txt --no-clean"], shell=True)
     self.execute(_post_install, (), msg="Installing nltk sets!")
     _bdist_wheel.run(self)
Example #43
 def do_run(self):
     bdist_wheel.run(self)