def run(self):
    """Build the package, then stamp the version into git/__init__.py.

    The built copy of ``git/__init__.py`` is removed first so the parent
    build copies a pristine source file before stamping.
    """
    init = path.join(self.build_lib, 'git', '__init__.py')
    if path.exists(init):
        # Drop the stale built copy; _build_py will re-copy the original.
        os.unlink(init)
    _build_py.run(self)
    _stamp_version(init)
    # Re-byte-compile the stamped file so the .pyc matches.
    self.byte_compile([init])
def run(self):
    """Check dependencies, optionally add NumPy-only packages, then build."""
    if not check_dependencies_once():
        return
    if is_Numpy_installed():
        # Packages that require NumPy are only built when NumPy is present.
        self.packages.extend(NUMPY_PACKAGES)
    build_py.run(self)
def run(self):
    """Copy the compiled pyted shared library into build_lib, then build.

    Fixes: raise a clear RuntimeError (instead of a bare IndexError from
    ``[...][0]``) when no shared library was produced, and use the
    parenthesized print form that works on both Python 2 and 3.
    """
    lib_extensions = [".so", ".dylib", ".dll"]
    module_name = "pyted"
    build_dir = os.path.abspath(".")
    lib_dir = os.path.join(build_dir, "python")
    lib_base = os.path.join(lib_dir, module_name)
    # Find the platform-specific shared library; fail loudly if missing.
    lib_files = [lib_base + ext for ext in lib_extensions
                 if os.path.isfile(lib_base + ext)]
    if not lib_files:
        raise RuntimeError(
            "shared library %s{.so,.dylib,.dll} not found; build incomplete"
            % lib_base)
    lib_file = lib_files[0]
    if not self.dry_run:
        target_dir = self.build_lib
        print("target_dir: " + target_dir)
        module_dir = os.path.join(target_dir, module_name)
        # make sure the module dir exists
        self.mkpath(module_dir)
        # copy our library to the module dir
        self.copy_file(lib_file, module_dir)
    # run parent implementation
    build_py.run(self)
def run(self):
    """
    During building, adds a variable with the complete version (from git
    describe) to fess/__init__.py.

    Fixes: the bare ``except:`` (which swallowed even KeyboardInterrupt)
    is narrowed to OSError — the only thing ``os.remove`` raises — and the
    redundant ``pass`` after ``log.exception`` is dropped.
    """
    try:
        outfile = self.get_module_outfile(self.build_lib, ["fess"], "__init__")
        # If we have an old version, delete it, so _build_py will copy the
        # original version into the build directory.
        os.remove(outfile)
    except OSError:
        log.exception("")
    # Superclass build
    print("Running _build_py")
    _build_py.run(self)
    print("Postprocessing")
    outfile = self.get_module_outfile(self.build_lib, ["fess"], "__init__")
    # Append the version number to __init__.py
    with open(outfile, "a") as of:
        log.info("ernwin_version %s", ernwin_version)
        of.write('\n__complete_version__ = "{}"'.format(ernwin_version))
def run(self):
    """Run the normal build, then rewrite ``.version`` inside build_lib
    with the distribution's current version string."""
    build_py.run(self)
    meta = self.distribution.metadata
    # Locate .version in the new build/ directory and refresh its contents.
    target_version_file = join(self.build_lib, meta.name, ".version")
    print("UPDATING {0}".format(target_version_file))
    with open(target_version_file, 'w') as f:
        f.write(meta.version)
def run(self):
    """Regenerate Qt UI modules and resources with pyuic4/pyrcc4, then build.

    Fix: ``self.byte_compile`` expects a *list* of filenames; passing the
    bare string made distutils iterate over its characters.
    """
    ui_dir = os.path.join("FluidNexus", "ui")
    uis = [os.path.join(ui_dir, filename)
           for filename in os.listdir(ui_dir)
           if filename.endswith(".ui")]
    for ui in uis:
        out = ui.replace(".ui", "UI.py")
        if os_name == "windows":
            command = ["pyuic4.bat", ui, "-o", out]
        else:
            command = ["pyuic4", ui, "-o", out]
        # For some reason pyuic4 doesn't want to work here...
        subprocess.call(command)
        # byte_compile takes a list of files, not a single path string.
        self.byte_compile([out])
    res = "FluidNexus/ui/FluidNexus_rc.py"
    if os_name == "windows":
        command = ["pyrcc4.exe", "FluidNexus/ui/res/FluidNexus.qrc", "-o", res]
    else:
        command = ["pyrcc4", "FluidNexus/ui/res/FluidNexus.qrc", "-o", res]
    subprocess.call(command)
    # Removing the language conversion for now
    #if (os_name != "windows"):
    #    regen_messages()
    _build_py.run(self)
def run(self):
    """Configure and build the native library with CMake, then build Python.

    Fix: the exit status of every native step was ignored, so a failed
    cmake/make silently produced a broken package — now any non-zero
    status aborts the build.
    """
    os.environ['CC'] = 'gcc'
    os.environ['CXX'] = 'g++'
    for cmd in (["mkdir", "-p", "build"],
                ["cmake", "-H.", "-Bbuild"],
                ["make", "-Cbuild", "install"]):
        if call(cmd) != 0:
            raise RuntimeError("native build step failed: %s" % " ".join(cmd))
    _build_py.run(self)
def run(self):
    """Build normally, then (Windows only) compile the injection helpers.

    Fixes: bare ``except:`` clauses narrowed to the exceptions actually
    expected (ImportError for the optional winbuild module, OSError for
    makedirs); typo "an microsoft" corrected in the user-facing message.
    """
    _build_py.run(self)
    if platform.system() != 'Windows':
        return
    import os
    try:
        import winbuild
    except ImportError:
        self.announce("Could not find a microsoft compiler for supporting "
                      "windows process injection", 2)
        return
    # can fail ?
    dirs = [x for x in self.get_data_files() if x[0] == 'pyrasite'][0]
    srcfile = os.path.join(dirs[1], 'win', 'inject_python.cpp')
    out32exe = os.path.join(dirs[2], 'win', 'inject_python_32.exe')
    out64exe = os.path.join(dirs[2], 'win', 'inject_python_64.exe')
    try:
        os.makedirs(os.path.dirname(out32exe))
    except OSError:
        pass  # directory already exists
    try:
        winbuild.compile(srcfile, out32exe, 'x86')
    except Exception:
        self.announce("Could not find an x86 microsoft compiler for "
                      "supporting injection to 32 bit python instances", 2)
    try:
        winbuild.compile(srcfile, out64exe, 'x64')
    except Exception:
        self.announce("Could not find an x64 microsoft compiler for "
                      "supporting injection to 64 bit python instances", 2)
def run(self):
    """Build packages (adding NumPy-dependent ones) and copy data files
    into build_lib so unit tests can find them.

    Fixes: the Python-2-only ``raise ValueError, "..."`` syntax is
    replaced with the call form (valid on both 2 and 3), and the string
    type check uses ``isinstance`` instead of comparing ``type()`` objects.
    """
    if not check_dependencies_once():
        return
    # Add software that requires Numpy to be installed.
    if is_Numpy_installed():
        self.packages.extend(NUMPY_PACKAGES)
    build_py.run(self)
    # In addition to installing the data files, we also need to make
    # sure that they are copied to the build directory. Otherwise, the
    # unit tests will fail because they cannot find the data files in
    # the build directory.  (Python >= 2.4 handles this automatically
    # via package_data.)
    import glob
    data_files = self.distribution.data_files
    for entry in data_files:
        if not isinstance(entry, str):
            raise ValueError("data_files must be strings")
        # Unix- to platform-convention conversion
        entry = os.sep.join(entry.split("/"))
        filenames = glob.glob(entry)
        for filename in filenames:
            dst = os.path.join(self.build_lib, filename)
            dstdir = os.path.split(dst)[0]
            self.mkpath(dstdir)
            self.copy_file(filename, dst)
def run(self):
    """Compile the pycmc wrapper library, copy it into build_lib, then build.

    Fix: raise a clear RuntimeError (as the sibling pylp build does)
    instead of an anonymous IndexError when the compile produced no
    shared library.
    """
    lib_extensions = [".so", ".dylib", ".dll"]
    module_name = "pycmc"
    source_dir = os.path.abspath(".")
    build_dir = os.path.abspath("build_pycmc")
    lib_dir = os.path.join(build_dir, "python")
    lib_base = os.path.join(lib_dir, module_name)
    if not self.dry_run:
        # create our shared object file
        call(["./compile_wrapper.sh", source_dir, build_dir,
              'rename_pycmc_lib'])
        target_dir = self.build_lib
        print("target_dir: " + target_dir)
        module_dir = os.path.join(target_dir, module_name)
        # make sure the module dir exists
        self.mkpath(module_dir)
        print(lib_base)
        lib_files = [lib_base + ext for ext in lib_extensions
                     if os.path.isfile(lib_base + ext)]
        if not lib_files:
            raise RuntimeError("library not found, build incomplete")
        # copy our library to the module dir
        self.copy_file(lib_files[0], module_dir)
    # run parent implementation
    build_py.run(self)
def run(self):
    """Expand templated sources into build_lib/brevitas/function, then build."""
    if self.dry_run:
        build_py.run(self)
        return
    from packaging import version
    target_dir = os.path.join(self.build_lib, 'brevitas', 'function')
    self.mkpath(target_dir)
    apply_template(target_dir, version)
    build_py.run(self)
def run(self):
    """Preprocess legate_c.h and bake the result into install_info.py,
    then run the normal build."""
    if not self.dry_run:
        self.mkpath(self.build_lib)
        # Compile up our C header here and insert it as a string into
        # legate_core_cffi.py so that it is installed with the python
        # library directly.
        root_dir = os.path.dirname(os.path.realpath(__file__))
        header_src = os.path.join(root_dir, "src", "legate_c.h")
        output_dir = os.path.join(root_dir, "legate", "core")
        include_dir = os.path.join(args.prefix, "include")
        cpp_cmd = [
            os.getenv("CC", "gcc"),
            "-E",
            "-DLEGATE_USE_PYTHON_CFFI",
            "-I" + str(include_dir),
            "-P",
            header_src,
        ]
        header = subprocess.check_output(cpp_cmd).decode("utf-8")
        libpath = os.path.join(args.prefix, "lib")
        template_path = os.path.join(output_dir, "install_info.py.in")
        with open(template_path) as f:
            content = f.read().format(header=repr(header),
                                      libpath=repr(libpath))
        with open(os.path.join(output_dir, "install_info.py"), "wb") as f:
            f.write(content.encode("utf-8"))
    build_py.run(self)
def run(self):
    """Pickle the compiled Stan model into build_lib, then run the build.

    Fix: use the public ``self.dry_run`` flag.  The private ``_dry_run``
    attribute is left as None by distutils until the ``dry_run`` accessor
    resolves it from the distribution, so testing ``self._dry_run``
    ignored a ``--dry-run`` request.
    """
    if not self.dry_run:
        # build stan model and output pickled file.
        target_dir = os.path.join(self.build_lib, PKL_STAN_MODEL_BASE_DIR)
        self.mkpath(target_dir)
        build_and_output_stan_model(target_dir)
    build_py.run(self)
def run(self):
    """Compile the pylp wrapper library, copy it into build_lib, then build."""
    extensions = (".so", ".dylib", ".dll")
    module_name = "pylp"
    source_dir = os.path.abspath(".")
    build_dir = os.path.abspath("build_pylp")
    lib_dir = os.path.join(build_dir, "python")
    lib_base = os.path.join(lib_dir, module_name)
    if not self.dry_run:
        # Build the shared object via the helper script.
        call(["./compile_wrapper.sh", source_dir, build_dir,
              'rename_pylp_lib'])
        target_dir = self.build_lib
        print("target_dir: " + target_dir)
        module_dir = os.path.join(target_dir, module_name)
        # Ensure the module directory exists before copying into it.
        self.mkpath(module_dir)
        print(lib_base)
        candidates = [lib_base + ext for ext in extensions
                      if os.path.isfile(lib_base + ext)]
        if not candidates:
            raise RuntimeError("library not found, build incomplete")
        # Copy the freshly built library into the module directory.
        self.copy_file(candidates[0], module_dir)
    # Run the parent implementation last.
    build_py.run(self)
def run(self):
    """Trigger ply table generation, refresh the catalog, then build."""
    # The import alone makes ply emit its parsetab/lextab files.
    import eulxml.xpath.core
    generate_catalog_if_needed()
    build_py.run(self)
def run(self):
    """Build mod files, copy them into build_lib/hnn_core/mod, then build.

    Fix: ``shutil.copytree`` raises if the destination already exists,
    which broke every rebuild — any previous copy is now removed first.
    """
    self.run_command("build_mod")
    build_dir = op.join(self.build_lib, 'hnn_core', 'mod')
    mod_path = op.join(op.dirname(__file__), 'hnn_core', 'mod')
    if op.exists(build_dir):
        # Clear the stale copy so copytree can recreate it.
        shutil.rmtree(build_dir)
    shutil.copytree(mod_path, build_dir)
    build_py.run(self)
def run(self):
    """Regenerate every PySide UI module from its .ui source, then build."""
    forms = [
        ("couchman/forms/MainWindow.ui", "couchman/UI/UI_MainWindow.py"),
        ("couchman/forms/DB_Manager.ui", "couchman/UI/UI_DocManager.py"),
        ("couchman/forms/Server.ui", "couchman/UI/UI_New_Server.py"),
        ("couchman/forms/Tasks.ui", "couchman/UI/UI_New_Task.py"),
        ("couchman/forms/WorkersWindow.ui", "couchman/UI/UI_WorkersWindow.py"),
        ("couchman/forms/ReplicationWindow.ui",
         "couchman/UI/UI_ReplicationWindow.py"),
    ]
    for src, dst in forms:
        subprocess.check_output(["pyside-uic", src, "-o", dst])
    _build_py.run(self)
def run(self):
    """Copy kernel_generator data files into build_lib, then build."""
    if not self.dry_run:
        target_dir = os.path.join(self.build_lib, 'xndtools',
                                  'kernel_generator')
        self.mkpath(target_dir)
        for src in data_files:
            dst = os.path.join(target_dir, os.path.basename(src))
            copyfile(src, dst)
    build_py.run(self)
def run(self):
    """Generate TFMA's proto-derived modules if missing, then build."""
    # Generate each necessary .proto-derived module if it doesn't exist.
    for proto in ('tensorflow_model_analysis/proto/config.proto',
                  'tensorflow_model_analysis/proto/metrics_for_slice.proto'):
        generate_proto(proto, False)
    # _build_py is an old-style class, so super() doesn't work.
    _build_py.run(self)
def run(self):
    """Generate core protobuf modules and ensure the compiler subpackage
    is importable, then build."""
    # Generate necessary .proto-derived modules if they don't exist.
    generate_proto("../src/google/protobuf/descriptor.proto")
    generate_proto("../src/google/protobuf/compiler/plugin.proto")
    # Touch __init__.py so google.protobuf.compiler is a valid package.
    with open('google/protobuf/compiler/__init__.py', 'a'):
        pass
    # _build_py is an old-style class, so super() doesn't work.
    _build_py.run(self)
def run(self):
    """Download imageio's images and platform libraries, then build."""
    import imageio
    # Fetch binaries into imageio's first resource directory.
    resource_dir = imageio.core.resource_dirs()[0]
    _set_crossplatform_resources(resource_dir)
    _set_platform_resources(resource_dir, imageio.core.get_platform())
    # Build as normal.
    build_py.run(self)
def run(self):
    """Write the generated tld_set module into build_lib, then build."""
    if not self.dry_run:
        target_dir = os.path.join(self.build_lib, 'tlds')
        self.mkpath(target_dir)
        data_path = os.path.join(target_dir, '_data.py')
        with open(data_path, 'w') as f:
            f.write('tld_set = set(%s)\n' % (tlds, ))
    build_py.run(self)
def run(self):
    """Build, then write install_config.py recording the pc_path setting."""
    build_py.run(self)
    config_dest = (Path(self.build_lib) / 'pykg_config'
                   / 'install_config.py')
    log.info("creating configuration file at %s", config_dest)
    if self.with_pc_path:
        content = 'pc_path = "' + self.with_pc_path + '"\n'
    else:
        content = 'pc_path = None\n'
    config_dest.write_text(content)
def run(self):
    """Generate api_endpoints content into build_lib/tinder_api, then build."""
    # honor the --dry-run flag
    if not self.dry_run:
        target_dir = os.path.join(self.build_lib, 'tinder_api')
        self.mkpath(target_dir)
        generate_api_endpoints_py_content(target_dir)
    # distutils uses old-style classes, so no super()
    build_py.run(self)
def run(self):
    """Build, then stamp _version.py into every built package."""
    # distutils uses old-style classes, so no super()
    build_py.run(self)
    version = calculate_version()
    print('build_py_copy_version using __version__ = "{0}"'.format(version))
    if self.dry_run:
        return
    import os.path
    for package in self.packages:
        pkg_dir = os.path.join(self.build_lib, *package.split('.'))
        with open(os.path.join(pkg_dir, '_version.py'), 'w') as fobj:
            fobj.write('__version__ = "{0}"'.format(version))
def run(self):
    """Write the distribution version into the configured version file,
    then run the normal build.

    Fix: the manual open/try/finally/close dance is replaced with a
    ``with`` statement, which guarantees the file is closed even if the
    write raises.
    """
    # Create a file populated with one version variable.
    dist = self.distribution
    with open(dist.version_file, 'w') as fp:
        fp.write('__version__ = "%s"' % dist.get_version())
    build_py.run(self)
def run(self):
    """Run the default build, then patch Synopsis/config.py in place."""
    # Do the default actions first.
    base.run(self)
    config_file = os.path.join(self.build_lib, 'Synopsis', 'config.py')
    self.announce("adjusting config parameters")
    reset_config_variables(config_file,
                           prefix=os.getcwd(),
                           revision=self.distribution.revision)
def run(self, *args, **kwargs):
    """Build, then regenerate the compiled Qt UIs and resource module."""
    _build_py.run(self, *args, **kwargs)
    uis = MakeUis(self.distribution)
    qt4gui_path = os.path.join(self.get_package_dir("openmolar"), "qt4gui")
    # Point the generator at the package's compiled-ui locations.
    uis.DEST_FOLDER = os.path.join(qt4gui_path, "compiled_uis")
    uis.RESOURCE_FILE = os.path.join(qt4gui_path, "resources_rc.py")
    uis.run(*args, **kwargs)
    info("build_py completed")
def run(self):
    """Generate ISMRMRD schema bindings into build_lib, then build."""
    # honor the --dry-run flag
    if not self.dry_run:
        outloc = os.path.join(self.build_lib, 'ismrmrd/xsd/ismrmrdschema')
        generate_schema(schema_file, config_file, outloc)
    # distutils uses old-style classes, so no super()
    build_py.run(self)
def run(self):
    """Build, then write install_config.py recording the pc_path setting."""
    build_py.run(self)
    config_dest = os.path.join(self.build_lib, 'pykg_config',
                               'install_config.py')
    log.info("creating configuration file at %s", config_dest)
    content = ('pc_path = "' + self.with_pc_path + '"\n'
               if self.with_pc_path else 'pc_path = None\n')
    with open(config_dest, 'w') as f:
        f.write(content)
def run(self):
    """Create the default config file if missing, then run the build.

    Fix: the Python-2-only ``print '...'`` statement is replaced with the
    parenthesized form, valid on both Python 2 and 3.
    """
    # honor the --dry-run flag
    if not self.dry_run:
        if not os.path.exists(CONFIG_FILE):
            print('creating %s' % CONFIG_FILE)
            with open(CONFIG_FILE, 'w') as f:
                f.write(generate_content())
    # distutils uses old-style classes, so no super()
    build_py.run(self)
def run(self):
    """Prepare the Windows rtcprof helper, build, then install the .pth file."""
    if os_is() == "win32":
        # Windows needs rtcprof_python.py alongside rtcprof.py.
        rtcprof_dir = os.path.join("OpenRTM_aist", "utils", "rtcprof/")
        self.copy_file(os.path.join(rtcprof_dir, "rtcprof.py"),
                       os.path.join(rtcprof_dir, "rtcprof_python.py"))
    _build_py.run(self)
    # Copy the OpenRTM-aist.pth file into the build tree.
    self.copy_file(os.path.join(".", "OpenRTM-aist.pth"),
                   self.build_lib,
                   preserve_mode=False)
def run(self):
    """Record whether this is a pure-python build, then run the build."""
    modulepolicy = 'py' if self.distribution.pure else 'c'
    with open("mercurial/__modulepolicy__.py", "w") as f:
        f.write('# this file is autogenerated by setup.py\n')
        f.write('modulepolicy = "%s"\n' % modulepolicy)
    build_py.run(self)
def run(self):
    """Build as usual, then copy extra data files, keeping execute bits."""
    # Copy .py files and build, as usual.
    build_py.run(self)
    # Copy data files.
    for data_file in self.package_data_files:
        outfile = os.path.join(self.build_lib, data_file)
        self.copy_file(data_file, outfile, preserve_mode=0)
        # Restore the execute bits that preserve_mode=0 dropped.
        if os.stat(data_file).st_mode & 0o111:
            os.chmod(outfile, os.stat(outfile).st_mode | 0o111)
def run(self):
    """Generate PyXB bindings for the ISMRMRD schema, then run the build.

    Fixes: removed the leftover debug ``print "Hello"`` and used the
    parenthesis-free py2 print no longer (the remaining code needs no
    print at all).
    """
    # honor the --dry-run flag
    if not self.dry_run:
        # NOTE(review): os.system with concatenated paths breaks on paths
        # containing spaces — consider subprocess with an argv list.
        os.system('pyxbgen -u ' + schema_file + ' -m ismrmrd_xsd'
                  ' --binding-root="' + self.build_lib + '"')
    # distutils uses old-style classes, so no super()
    build_py.run(self)
def run(self):
    """Build as usual, then copy extra data files, keeping execute bits.

    Fix: the bare ``0111`` octal literal is Python-2-only syntax (a
    SyntaxError on Python 3); ``0o111`` is valid on 2.6+ and 3.
    """
    # Copy .py files and build, as usual.
    build_py.run(self)
    # Copy data files.
    for data_file in self.package_data_files:
        outfile = os.path.join(self.build_lib, data_file)
        self.copy_file(data_file, outfile, preserve_mode=0)
        executable = (os.stat(data_file).st_mode & 0o111) != 0
        if executable:
            os.chmod(outfile, os.stat(outfile).st_mode | 0o111)
def run(self):
    """Record the mercurial changeset id into build_lib/yt, then build."""
    # honor the --dry-run flag
    if not self.dry_run:
        target_dir = os.path.join(self.build_lib, 'yt')
        changeset = get_mercurial_changeset_id(os.getcwd())
        self.mkpath(target_dir)
        version_path = os.path.join(target_dir, '__hg_version__.py')
        with open(version_path, 'w') as fobj:
            fobj.write("hg_version = '%s'\n" % changeset)
    build_py.run(self)
def run(self):
    """Generate gRPC/gopy artifacts into build_lib, then run the build."""
    target_dir = os.path.join(self.build_lib, 'pygo_plugin/_goplugin')
    # Tell the generator tool where build output should land.
    GopyGenTool.build_lib_target = target_dir
    if not self.dry_run:
        self.mkpath(target_dir)
        self.run_command('grpc')
        self.run_command('gopy_build')
    build_py.run(self)
def run(self):
    """Compile the DCCL option_extensions proto, then run the build."""
    # Generate the option_extensions proto module.
    protoc_command = [
        'protoc',
        '-I../build/include/',
        '-I/usr/include',
        '--python_out=.',
        '../build/include/dccl/option_extensions.proto',
    ]
    if subprocess.call(protoc_command) != 0:
        sys.exit(-1)
    # _build_py is an old-style class, so super() doesn't work.
    _build_py.run(self)
def run(self):
    """Build, then write __version__.py into the built rapidsms package.

    Fixes: replaces the Python-2-only ``file()`` builtin and
    ``print >>fh`` / bare print statements with ``open()`` in a ``with``
    block (guaranteeing the handle is closed) and the parenthesized
    print form valid on both Python 2 and 3.
    """
    _build_py.run(self)
    if version == "unknown":
        print("RapidSMS version unknown! Is git in your path?")
    else:
        vstring = "VERSION = '%s'" % version
        vfilename = os.path.join(self.build_lib, "rapidsms",
                                 "__version__.py")
        with open(vfilename, "w") as vfile:
            vfile.write(vstring + "\n")
        print("setting %s in %s" % (vstring, vfilename))
def run(self):
    """Write the generated _config.py for psycopg2ct, then run the build."""
    if not self.dry_run:
        target_path = os.path.join(self.build_lib, 'psycopg2ct')
        self.mkpath(target_path)
        config_path = os.path.join(target_path, '_config.py')
        content = ('# Auto-generated by setup.py\n'
                   'PG_LIBRARY = "%s"\n'
                   'PG_VERSION = %s\n') % (self.libpq_path,
                                           self.libpq_version)
        with open(config_path, 'w') as fh:
            fh.write(content)
    _build_py.run(self)
def run(self):
    """Build, then replace _version.py in build_lib with computed versions.

    Fix: the manual open/write/close sequence is replaced with a ``with``
    statement so the file is closed even if the write raises.
    """
    versions = get_versions(verbose=True)
    _build_py.run(self)
    # now locate _version.py in the new build/ directory and replace it
    # with an updated value
    target_versionfile = os.path.join(self.build_lib, versionfile_build)
    print("UPDATING %s" % target_versionfile)
    os.unlink(target_versionfile)
    with open(target_versionfile, "w") as f:
        f.write(SHORT_VERSION_PY % versions)
def run(self):
    """Build, then refresh the generated version file inside build_lib."""
    root = get_root()
    cfg = get_config_from_root(root)
    versions = get_versions()
    _build_py.run(self)
    # Replace _version.py in the new build/ directory with an updated
    # value, if a build-tree location is configured.
    if cfg.versionfile_build:
        target = os.path.join(self.build_lib, cfg.versionfile_build)
        print("UPDATING %s" % target)
        write_to_version_file(target, versions)
def run(self):
    """Run the default build, then patch qm/config.py in place."""
    # Do the default actions first.
    base.run(self)
    config_file = os.path.join(self.build_lib, 'qm', 'config.py')
    self.announce("adjusting config parameters")
    reset_config_variables(
        config_file,
        version=self.distribution.get_version(),
        prefix=os.getcwd(),
        extension_path=os.path.join('qm', 'test', 'classes'))
def run(self):
    """Build, then best-effort install the pyrocko bash completion script.

    Fixes: Python-2-only print statements replaced with the parenthesized
    form (valid on 2 and 3), and ``raise e`` replaced with a bare
    ``raise`` so the original traceback is preserved.
    """
    make_info_module(packname, version)
    build_py.run(self)
    try:
        shutil.copy('extras/pyrocko', '/etc/bash_completion.d/pyrocko')
        print('Installing pyrocko bash_completion...')
    except IOError as e:
        import errno
        if e.errno in (errno.EACCES, errno.ENOENT):
            # Missing permission or completion dir is not fatal.
            print(e)
        else:
            raise
def run(self):
    """Compile Qt Designer .ui files into build_lib/beem/ui, then build."""
    from PyQt4 import QtCore
    bin_path = QtCore.QLibraryInfo.location(QtCore.QLibraryInfo.BinariesPath)
    pyuic = os.path.join(bin_path, 'pyuic4')
    folder = os.path.join(self.build_lib, 'beem/ui')
    self.mkpath(folder)
    for src in glob.glob('ui/*.ui'):
        stem = os.path.splitext(os.path.basename(src))[0]
        out = os.path.join(folder, stem + '_ui.py')
        print('creating ' + out)
        os.system(pyuic + ' ' + src + ' -o ' + out)
    build_py.run(self)
def run(self):
    """Generate the CMap resources for CJK encodings, then build."""
    # Create the cmap directory, tolerating that it may already exist.
    try:
        os.makedirs(cmapdir)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise
    conversions = [
        ('Adobe-CNS1', 'cmaprsrc/cid2code_Adobe_CNS1.txt',
         ['cp950', 'big5']),
        ('Adobe-GB1', 'cmaprsrc/cid2code_Adobe_GB1.txt',
         ['cp936', 'gb2312']),
        ('Adobe-Japan1', 'cmaprsrc/cid2code_Adobe_Japan1.txt',
         ['cp932', 'euc-jp']),
        ('Adobe-Korea1', 'cmaprsrc/cid2code_Adobe_Korea1.txt',
         ['cp949', 'euc-kr']),
    ]
    for registry, src, encodings in conversions:
        conv_cmap.convert(cmapdir, registry, src, encodings)
    # Continue with regular build.
    build_py.run(self)
def run(self):
    """Build quietly, then copy extra data files, keeping execute bits."""
    # Raise the log threshold to avoid "__init__.py not found" warnings.
    old = log.set_threshold(log.ERROR)
    # Copy .py files and build, as usual.
    build_py.run(self)
    log.set_threshold(old)
    # Copy data files.
    for data_file in self.package_data_files:
        outfile = os.path.join(self.build_lib, data_file)
        self.copy_file(data_file, outfile, preserve_mode=0)
        # Restore the execute bits that preserve_mode=0 dropped.
        if os.stat(data_file).st_mode & 0o111:
            os.chmod(outfile, os.stat(outfile).st_mode | 0o111)
def run(self):
    """Build, compile the native extensions, and bundle viewer resources."""
    # First let the real build_py routine do its thing.
    build_py.run(self)
    # Then compile the extensions into the just-built module.
    self.spawn(['/usr/bin/python', 'app/deps/build.py',
                abspath(self.build_lib)])
    # Include some ui resources for running a script from the command line.
    rsrc_dir = '%s/plotdevice/rsrc' % self.build_lib
    self.mkpath(rsrc_dir)
    self.copy_file("app/Resources/colors.json",
                   '%s/colors.json' % rsrc_dir)
    self.spawn(['/usr/bin/ibtool', '--compile',
                '%s/viewer.nib' % rsrc_dir,
                "app/Resources/English.lproj/PlotDeviceScript.xib"])
    self.copy_file("app/Resources/PlotDeviceFile.icns",
                   '%s/viewer.icns' % rsrc_dir)
def run(self):
    """Build, then download the pretrained chainermodel into the data dir.

    Fix: the original invoked ``BuildPyCommand.run(self)`` both at the
    start and again at the end, running the entire build twice; the
    redundant trailing call is removed.
    """
    BuildPyCommand.run(self)
    # create data dirs
    data_dir = osp.join(self.build_lib, 'fcn/_data')
    if not osp.exists(data_dir):
        os.makedirs(data_dir)
    # download chainermodel to the data dir
    output = osp.join(data_dir, 'fcn8s_from_caffe.chainermodel')
    url = 'https://drive.google.com/uc?id=0B9P1L--7Wd2vTXU0QzUwSkVwOFk'
    cmd = "gdown -q '{0}' -O {1}".format(url, output)
    print("Downloading '{0}' by command '{1}'".format(output, cmd))
    subprocess.check_call(shlex.split(cmd))
def run(self):
    """Build, then ensure suricata/config/defaults.py ends up in build_lib."""
    build_py.run(self)
    defaults_py_out = os.path.join(
        self.build_lib, "suricata", "config", "defaults.py")
    if os.path.exists(defaults_py_out):
        return
    # Must be an out of tree build, find defaults.py.
    defaults_py_in = os.path.join(
        self.build_lib, "..", "suricata", "config", "defaults.py")
    if not os.path.exists(defaults_py_in):
        print("error: failed to find defaults.py")
        sys.exit(1)
    shutil.copy(defaults_py_in, defaults_py_out)
def run(self):
    """Generate the xsd binding module in the source package, then build."""
    # honor the --dry-run flag
    if not self.dry_run:
        outloc = self.get_package_dir('ismrmrd')
        modname = 'xsd'
        modfile = os.path.join(outloc, '%s.py' % modname)
        generate_schema(schema_file, modname, outloc)
        # Append a default-namespace hook to the generated module.
        with open(modfile, 'a') as f:
            f.write('\nimport pyxb.utils.domutils\n'
                    'pyxb.utils.domutils.BindingDOMSupport.'
                    'SetDefaultNamespace(Namespace)\n')
    # distutils uses old-style classes, so no super()
    build_py.run(self)