def run(self):
    """Copy the freshly built libgumbo shared library into the package
    tree, then run the standard sdist.

    A missing library (IOError on the copy) is reported but not fatal,
    matching the original best-effort behaviour.
    """
    import shutil
    try:
        # Keep the try body minimal: only the copy should be guarded.
        # Previously sdist.run was inside the try, so an IOError raised
        # during the sdist itself was silently swallowed too.
        shutil.copyfile('.libs/libgumbo.so', 'python/gumbo/libgumbo.so')
    except IOError as e:
        print(e)
        return
    sdist.run(self)
def run(self):
    """Run the standard sdist, then clean up build metadata.

    It would be nice to clean up MANIFEST.in and plotdevice.egg-info
    here, but we'll see if the upload command depends on them...
    """
    sdist.run(self)
    # Guard the clean-up: a second invocation (or a tree where these
    # were never generated) should not crash with a missing-file error.
    if os.path.isdir('plotdevice.egg-info'):
        remove_tree('plotdevice.egg-info')
    if os.path.exists('MANIFEST.in'):
        os.unlink('MANIFEST.in')
def run(self):
    """Regenerate derived files (docs, UI modules, protos, artifacts)
    according to the command-line switches, then build the sdist."""
    # Each "no_*" option disables the corresponding generation step.
    requested = {
        'make_docs': not self.no_make_docs,
        'make_ui_files': not self.no_make_ui_files,
        'force_compile_protos': not self.no_compile_protos,
        'sync_artifacts': not self.no_sync_artifacts,
    }
    run_make_files(**requested)
    sdist.run(self)
def run(self):
    """Expand @VERSION@/@RELEASE@ macros from pyrax.spec.in into
    pyrax.spec, then run the standard sdist.

    Development releases (a ``release`` starting with '0') get a
    date+git-hash suffix appended to the release string.
    """
    global version
    global release
    # Create a development release string for later use.
    git_head = subprocess.Popen("git log -1 --pretty=format:%h",
                                shell=True,
                                stdout=subprocess.PIPE).communicate()[0].strip()
    if isinstance(git_head, bytes):
        # On Python 3 Popen yields bytes; the spec file needs str.
        git_head = git_head.decode('utf-8')
    date = time.strftime("%Y%m%d%H%M%S", time.gmtime())
    git_release = "%sgit%s" % (date, git_head)
    # Expand macros in pyrax.spec.in.  Iterate the file object directly:
    # file.xreadlines() was removed in Python 3.  `with` guarantees both
    # handles are closed even if macro expansion raises.
    with open('pyrax.spec.in', 'r') as spec_in, \
            open('pyrax.spec', 'w') as spec:
        for line in spec_in:
            if "@VERSION@" in line:
                line = line.replace("@VERSION@", version)
            elif "@RELEASE@" in line:
                # If development release, include date+githash in %{release}
                if release.startswith('0'):
                    release += '.' + git_release
                line = line.replace("@RELEASE@", release)
            spec.write(line)
    # Run parent constructor
    _sdist.run(self)
def run(self):
    """Run prep() on a best-effort basis, then the standard sdist."""
    # try to run prep if needed
    try:
        prep()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt still abort; any prep failure remains
        # non-fatal by design.
        pass
    _sdist.run(self)
def run(self):
    """Build the man pages with Sphinx, install them into the docs
    tree, then produce the source distribution."""
    source_dir = './docs/sphinx'
    doctree_dir = './build/doctrees'
    man_build_dir = './build/man'
    final_dir = DOC_MAN_PATH
    # Start from a clean build area.
    for stale in (doctree_dir, man_build_dir):
        if os.path.exists(stale):
            shutil.rmtree(stale)
    # sphinx doc generation
    sphinx.build_main([
        'sphinx-build',
        '-c', source_dir,
        '-b', 'man',
        '-T',
        '-d', doctree_dir,
        source_dir,      # input dir
        man_build_dir,   # output dir
    ])
    # copy to docs folder, replacing any previous output
    if os.path.exists(final_dir):
        shutil.rmtree(final_dir)
    shutil.copytree(man_build_dir, final_dir)
    # actual sdist
    sdist.run(self)
def run(self):
    """Stamp the detected git version into <name>/__init__.py via sed,
    then build the source distribution."""
    if git_version:
        # Rewrite the VERSION assignment in place before packaging.
        sed_cmd = "sed -ri 's/VERSION = (.+)/VERSION = \"%s\"/' %s/__init__.py" % (git_version, name)
        subprocess.Popen(sed_cmd, shell=True).communicate()
    sdist.run(self)
def run(self):
    """Regenerate a GNU-style ChangeLog from git history (with the
    project mailmap applied) before running the standard sdist."""
    if os.path.isdir('.git'):
        git_log_gnu = 'git log --format="%ai %aN %n%n%x09* %s%d%n"'
        raw_log = run_git_command(git_log_gnu)
        aliases = parse_mailmap()
        with open("ChangeLog", "w") as out:
            # Canonicalize author names via the mailmap before writing.
            out.write(str_dict_replace(raw_log, aliases))
    sdist.run(self)
def run(self):
    """Copy the built gumbo shared library into the package tree, then
    run the standard sdist; a missing library is reported, not fatal."""
    import shutil
    try:
        # Guard only the copy.  Previously sdist.run sat inside the
        # try, so IOErrors raised by the sdist itself were swallowed.
        shutil.copyfile('.libs/' + _name_of_lib, 'python/gumbo/' + _name_of_lib)
    except IOError as e:
        print(e)
        return
    sdist.run(self)
def run(self):
    """Download the DShield/Spamhaus data files (unless disabled) so
    they ship inside the source distribution, then run sdist."""
    if not self.disabledl:
        self._downloadDatabase(
            "DShield",
            "http://www.dshield.org/ipsascii.html?limit=10000",
            "preludecorrelator/plugins/dshield.dat")
        self._downloadDatabase(
            "Spamhaus",
            "http://www.spamhaus.org/drop/drop.lasso",
            "preludecorrelator/plugins/spamhaus_drop.dat")
    else:
        print("Automatic downloading of DShield and Spamhaus databases is disabled.")
        print("As a result, they won't be included in the generated source distribution.")
    sdist.run(self)
def run(self):
    """Regenerate C sources from the Cython .pyx files (best effort),
    then always run the standard sdist."""
    try:
        from Cython.Build import cythonize
        cythonize(os.path.join('pyhsmm', '**', '*.pyx'))
    except Exception:
        # Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt still abort; any other failure keeps the
        # original best-effort warning behaviour.
        warn('Failed to generate extension files from Cython sources')
    finally:
        _sdist.run(self)
def run(self):
    """Write the skip-Cython flag file, run sdist, then remove the flag.

    The removal happens in a ``finally`` so a failing sdist cannot leave
    the flag behind and poison later builds; it is also skipped in
    dry-run mode, where the flag was never written (the original
    unconditionally removed a file that might not exist).
    """
    if not self.dry_run:
        with open(SKIP_CYTHON_FILE, 'w') as flag_file:
            flag_file.write('COMPILE_FROM_C_ONLY')
    try:
        sdist.run(self)
    finally:
        if not self.dry_run:
            os.remove(SKIP_CYTHON_FILE)
def run(self):
    """Build the man page (plus Qt resources when that command is
    registered) before creating the source distribution."""
    pre_commands = ['build_man']
    # The qt_resources command only exists in some configurations.
    if 'qt_resources' in self.distribution.cmdclass:
        pre_commands.append('qt_resources')
    for cmd in pre_commands:
        self.run_command(cmd)
    sdist.run(self)
def run(self):
    """Regenerate ChangeLog from ``git log`` when building from a git
    checkout, then run the standard sdist."""
    if os.path.isdir(".git"):
        # We're in a Git branch
        log_cmd = subprocess.Popen(["git", "log"], stdout=subprocess.PIPE)
        changelog = log_cmd.communicate()[0]
        if isinstance(changelog, bytes):
            # communicate() returns bytes on Python 3, but ChangeLog is
            # opened in text mode -- writing bytes would raise TypeError.
            changelog = changelog.decode("utf-8", "replace")
        with open("ChangeLog", "w") as changelog_file:
            changelog_file.write(changelog)
    sdist.run(self)
def run(self):
    """Resolve the package version, persist it to version.txt, then run
    the standard sdist."""
    global __version__
    resolved = get_version(False)
    __version__ = resolved
    with open("version.txt", "w") as version_file:
        version_file.write(resolved)
    # Need to use old style super class invocation here for
    # backwards compatibility.
    sdist.run(self)
def run(self):
    """Compile the protobufs and sync the artifact repo, then run the
    standard sdist."""
    # Compile the protobufs.  (The unused `base_dir = os.getcwd()`
    # local from the original has been dropped.)
    subprocess.check_call(["python", "makefile.py"])
    # Sync the artifact repo with upstream for distribution.
    subprocess.check_call(["python", "makefile.py"], cwd="grr/artifacts")
    sdist.run(self)
def run(self):
    """Download the reputation databases (unless disabled) so they are
    bundled in the source distribution, then run sdist."""
    if self.disabledl:
        print("Automatic downloading of databases is disabled.")
        print("As a result, they won't be included in the generated source distribution.")
    else:
        # (label, source URL, destination inside the package)
        databases = (
            ("DShield", "http://www.dshield.org/ipsascii.html?limit=10000", "rules/dshield.dat"),
            ("Spamhaus", "http://www.spamhaus.org/drop/drop.lasso", "rules/spamhaus_drop.dat"),
            ("CIArmy", "http://cinsscore.com/list/ci-badguys.txt", "rules/ciarmy.dat"),
        )
        for label, url, dest in databases:
            self._downloadDatabase(label, url, dest)
    sdist.run(self)
def run(self):
    """Regenerate ChangeLog from ``bzr log --gnu`` when building from a
    bzr branch, then run the standard sdist."""
    if os.path.isdir('.bzr'):
        # We're in a bzr branch
        log_cmd = subprocess.Popen(["bzr", "log", "--gnu"],
                                   stdout=subprocess.PIPE)
        changelog = log_cmd.communicate()[0]
        if isinstance(changelog, bytes):
            # communicate() returns bytes on Python 3 but the ChangeLog
            # file is opened in text mode.
            changelog = changelog.decode("utf-8", "replace")
        with open("ChangeLog", "w") as changelog_file:
            changelog_file.write(changelog)
    sdist.run(self)
def run(self):
    """Cythonize all configured modules when Cython is available, then
    always fall through to the standard sdist."""
    try:
        from Cython.Build import cythonize
    except ImportError:
        # No Cython installed: ship whatever generated sources exist.
        pass
    else:
        pyx_paths = [mod.replace('.', '/') + '.pyx' for mod in cython_modules]
        cythonize(pyx_paths)
    _sdist.run(self)
def run(self):
    """Build a pure source distribution: temporarily strip extension
    and package metadata from the distribution, run sdist, and restore
    the originals no matter what happens."""
    dist = self.distribution
    saved_exts = (dist.ext_modules, dist.ext_package)
    saved_pkgs = (dist.packages, dist.package_dir)
    try:
        dist.ext_modules = dist.ext_package = None
        dist.packages = dist.package_dir = None
        dSDist.run(self)
    finally:
        # Put back whatever the distribution had before we meddled.
        dist.ext_modules, dist.ext_package = saved_exts
        dist.packages, dist.package_dir = saved_pkgs
def run(self):
    """Ensure every Cython module has an up-to-date generated C file
    before building the sdist.

    If a ``cython`` command is registered it is run; otherwise the
    pre-generated .c files must already exist on disk.
    """
    if 'cython' in cmdclass:
        self.run_command('cython')
    else:
        for pyxfile in self._pyxfiles:
            cfile = pyxfile[:-3] + 'c'  # foo.pyx -> foo.c
            if not os.path.isfile(cfile):
                # Raise instead of assert: asserts are stripped under
                # `python -O`, which would silently ship a broken sdist.
                raise RuntimeError(
                    "C-source file '%s' not found." % (cfile)
                    + " Run 'setup.py cython' before sdist.")
    sdist.run(self)
def run(self):
    """Strip bundled third-party binaries from the tree before
    building the source distribution.

    Each removal is guarded independently: in the original, a single
    try around all four removes meant the first missing binary
    (OSError) skipped cleanup of the remaining ones.
    """
    for tool in ("vsearch", "muscle", "fasttree", "swarm"):
        try:
            os.remove(os.path.join(THIRDPARTY_BIN_PATH, tool))
        except OSError:
            # Already absent -- nothing to clean up.
            pass
    sdist.run(self)
def run(self):
    """Refresh the compiled Cython translations (or loudly warn when
    Cython is unavailable) before building the source distribution."""
    # Make sure the compiled Cython files in the distribution are up-to-date
    if USE_CYTHON:
        _cythonize(EXTENSIONS)
    else:
        # ANSI bold-red warning so it stands out in the build log.
        warn('\n\n\033[91m\033[1m WARNING: '
             'IF YOU A PREPARING A DISTRIBUTION: Cython is not available! '
             'The cythonized `*.cpp` files may be out of date. Please '
             'install Cython and run `sdist` again.'
             '\033[0m\n')
    _sdist.run(self)
def run(self):
    """Generate HappyDoc documentation when the library is installed,
    then run the standard sdist."""
    # Build docs before source distribution -- only if happydoclib is
    # importable; its absence is not an error.
    happydoc_available = True
    try:
        import happydoclib  # noqa: F401 -- presence check only
    except ImportError:
        happydoc_available = False
    if happydoc_available:
        self.run_command('happy')
    # Run the standard sdist command
    old_sdist.run(self)
def run(self):
    """Regenerate ChangeLog from bzr (with the novalog plugin and the
    project mailmap applied) before running the standard sdist."""
    if os.path.isdir('.bzr'):
        # We're in a bzr branch; point bzr at the bundled plugins.
        env = os.environ.copy()
        env['BZR_PLUGIN_PATH'] = os.path.abspath('./bzrplugins')
        proc = subprocess.Popen(["bzr", "log", "--novalog"],
                                stdout=subprocess.PIPE, env=env)
        raw_log = proc.communicate()[0]
        aliases = parse_mailmap()
        with open("ChangeLog", "w") as changelog_file:
            changelog_file.write(str_dict_replace(raw_log, aliases))
    sdist.run(self)
def _build_target_release(self):
    """Download Python plus all software packages, build the sdist, and
    rename each produced archive to embed the target OS/architecture."""
    self._download_python()
    for pkg in self._software_packages():
        self._download(pkg)
    SDistCommand.run(self)
    dist_name = self.distribution.get_fullname()
    for src in self.archive_files:
        # Everything after the dist name is the archive extension.
        ext = os.path.basename(src).split(dist_name)[1]
        dest = '-'.join([dist_name, str(self.target_os),
                         str(self.target_arch)]) + ext
        dest = os.path.join(self.dist_dir, dest)
        shutil.move(src, dest)
        # print() function call: the original Python 2 print statement
        # is a SyntaxError on Python 3.
        print("Wrote '%s'" % dest)
def run(self):
    """Prepare the tree for a GPU source distribution, build it, and
    always remove the source-package marker afterwards.

    The marker removal now sits in a ``finally`` so a failing sdist
    cannot leave ``_IS_SOURCE_PACKAGE.txt`` behind and mislead later
    builds.
    """
    copy_files(use_gpu=True)
    marker = os.path.join(CURRENT_DIR, '_IS_SOURCE_PACKAGE.txt')
    open(marker, 'w').close()
    # Drop build outputs that must not ship in the sdist.
    release_dir = os.path.join(CURRENT_DIR, 'lightgbm', 'Release')
    if os.path.exists(release_dir):
        shutil.rmtree(release_dir)
    win_dir = os.path.join(CURRENT_DIR, 'lightgbm', 'windows', 'x64')
    if os.path.exists(win_dir):
        shutil.rmtree(win_dir)
    lib_path = os.path.join(CURRENT_DIR, 'lightgbm', 'lib_lightgbm.so')
    if os.path.isfile(lib_path):
        os.remove(lib_path)
    try:
        sdist.run(self)
    finally:
        if os.path.isfile(marker):
            os.remove(marker)
def run(self):
    """Prepare the tree for a GPU source distribution, build it, and
    always remove the source-package marker afterwards.

    Marker removal moved into a ``finally`` so a failed sdist cannot
    leave ``_IS_SOURCE_PACKAGE.txt`` behind.
    """
    copy_files(use_gpu=True)
    marker = './_IS_SOURCE_PACKAGE.txt'
    open(marker, 'w').close()
    # Strip build outputs that must not be shipped.
    for stale_dir in ("./lightgbm/Release/", "./lightgbm/windows/x64/"):
        if os.path.exists(stale_dir):
            shutil.rmtree(stale_dir)
    if os.path.isfile('./lightgbm/lib_lightgbm.so'):
        os.remove('./lightgbm/lib_lightgbm.so')
    try:
        sdist.run(self)
    finally:
        if os.path.isfile(marker):
            os.remove(marker)
def run(self):
    """Compile protobufs, sync the artifact repo, and fetch the docs
    for offline use, then run the standard sdist."""
    # Compile the protobufs.  (The unused `base_dir = os.getcwd()`
    # local from the original has been dropped.)
    subprocess.check_call(["python", "makefile.py"])
    # Sync the artifact repo with upstream for distribution.
    subprocess.check_call(["python", "makefile.py"], cwd="grr/artifacts")
    # Download the docs so they are available offline.
    subprocess.check_call(["python", "makefile.py"], cwd="docs")
    sdist.run(self)
def run(self):
    """Remove stale prebuilt capstone binaries, then either ship the
    existing prebuilt libraries or copy the C sources into the package
    before building the sdist."""
    stale = (glob.glob("capstone/*.dll")
             + glob.glob("capstone/*.so")
             + glob.glob("capstone/*.dylib"))
    for filename in stale:
        try:
            os.unlink(filename)
        except Exception:
            pass
    # if prebuilt libraries are existent, then do not copy source
    prebuilt_ready = os.path.exists(PATH_LIB64) and os.path.exists(PATH_LIB32)
    if not prebuilt_ready:
        copy_sources()
    return sdist.run(self)
def run(self):
    """Validate the templated files for this version, build the sdist,
    and remind the maintainer how to upload the release."""
    working_dir = os.getcwd()
    self.CheckTemplates(working_dir, setup_args["version"])
    sdist.run(self)
    print("To upload a release, run upload.sh [version]")
def run(self):
    """Refresh the generated installer script and the Qt UI modules,
    then build the source distribution."""
    for regenerate in (update_inno_iss, convert_qt_ui):
        regenerate()
    sdist.run(self)
def run(self):
    """Optionally hide the '+commit' local-version suffix from the
    reported version before building the sdist."""
    if self.strip_commit:
        def public_version():
            # Evaluated lazily, like the original lambda: report only
            # the part of __version__ before any '+'.
            return __version__.split('+')[0]
        self.distribution.get_version = public_version
    orig_sdist.run(self)
def run(self):
    """Materialize the converted docs for the sdist and always remove
    them afterwards.

    The cleanup is wrapped in ``finally`` so a failing sdist can no
    longer leave the generated docs behind.
    """
    converter = Converter()
    converter.dd_docs()
    try:
        sdist.run(self)
    finally:
        converter.rm_docs()
def run(self):
    """Run the standard sdist with a freshly written version file in
    place for the duration of the build."""
    version_guard = write_version()
    with version_guard:
        sdist.run(self)
def run(self): self.run_command("build_ext") _sdist.run(self)
def run(self): self.run_command("build_ext") return sdist.run(self)
def run(self):
    """Regenerate the compiled translations of the Cython extensions
    so the sdist ships up-to-date sources."""
    # Make sure the compiled Cython files in the distribution are up-to-date
    from Cython.Build import cythonize
    cythonize(cython_extensions)
    _sdist.run(self)
def run(self):
    """Build the bundled web assets before producing the sdist."""
    build_web()  # regenerate web assets so they ship in the archive
    sdist.run(self)
def run(self):
    """Thin wrapper: delegate straight to the stock sdist."""
    outcome = _sdist.run(self)
    return outcome
def run(self):
    """Compile the static assets, then hand off to the stock sdist."""
    # Static assets must exist before the archive is assembled.
    self.run_command('build_static')
    SDistCommand.run(self)
def run(self):
    """Compile the static assets and return the stock sdist's result."""
    self.run_command('build_static')
    outcome = sdist.run(self)
    return outcome
def run(self): self.run_command("build_builtin") sdist.run(self)
def run(self):
    """Optionally swap in the commit-free version reporter before
    running the stock sdist."""
    if self.strip_commit:
        # Report the plain WA version, without any commit suffix.
        self.distribution.get_version = get_wa_version
    orig_sdist.run(self)
def run(self):
    """Refresh version.py, sync the metadata version, and build the
    source distribution."""
    update_version_py()
    refreshed = get_version()
    self.distribution.metadata.version = refreshed
    return _sdist.run(self)
def run(self):
    """Generate the UI modules, then return the stock sdist's result."""
    self.run_command('build_ui')
    outcome = sdist.run(self)
    return outcome
def run(self):
    """Compile the translation catalogs before building the sdist."""
    # Message catalogs must be compiled so they ship in the archive.
    self.run_command('compilemessages')
    _sdist.run(self)
def run(self):
    """Run the gulp asset build, then the standard sdist."""
    # Front-end assets are generated by gulp before packaging.
    self.run_command('gulp_build')
    sdist.run(self)
def run(self):
    """Build the frontend bundle, then create the source distribution.

    Uses subprocess's ``cwd`` argument instead of a fragile
    ``cd frontend &&`` shell prefix: a missing directory now fails with
    a clear error rather than depending on shell `cd` semantics.
    ``shell=True`` is kept so npm resolves via the shell (npm.cmd on
    Windows); the command string is fixed, not user input.
    """
    subprocess.check_call('npm run build', shell=True, cwd='frontend')
    sdist.run(self)
def run(self):
    """Translate the Cython sources to C++ before running sdist."""
    from Cython.Build import cythonize
    # Regenerate the .cpp translations so the sdist is self-contained.
    cythonize(sources, language='c++')
    _sdist.run(self)
def run(self):
    """Regenerate the UI files unless suppressed, then build the
    source distribution."""
    should_build_ui = not self.no_make_ui_files
    if should_build_ui:
        make_ui_files()
    sdist.run(self)
def run(self):
    """Download vswhere, then build the source distribution.

    We need vswhere in the sdist, so a failed download deliberately
    raises and aborts the build.
    """
    _DownloadVsWhere()
    sdist.run(self)
def run(self):
    """Persist the package version to disk, then run the stock sdist."""
    with open(VERSION_PATH, 'w') as version_file:
        version_file.write(__version__)
    sdist_orig.run(self)
def run(self):
    """Regenerate the protobuf modules before building the sdist."""
    compile_protos()  # generated _pb2 modules must ship in the archive
    sdist.run(self)
def run(self):
    """Run prep, then the stock sdist.

    Note: a prep failure propagates and aborts the build.
    """
    prep()  # run prep if needed
    _sdist.run(self)
def run(self):
    """Delete the package's stale cbuild directory, then run sdist.

    Uses shutil.rmtree instead of shelling out to ``rm -rf``: portable
    (no POSIX `rm` required) and no subprocess overhead.
    """
    import shutil
    bpath = pjoin(build_root, pkg, 'cbuild')
    if os.path.isdir(bpath):
        shutil.rmtree(bpath)
    sdist.run(self)
def run(self):
    """Regenerate the C translation of the stats extension so the
    sdist ships an up-to-date source."""
    # Make sure the compiled Cython files in the distribution are up-to-date
    from Cython.Build import cythonize
    cythonize(['bayesflare/stats/general.pyx'])
    _sdist.run(self)
def run(self):
    # Pure pass-through: no pre/post-processing, just the stock sdist.
    sdist.run(self)
def run(self):
    """Stage the required files into the package tree, then run the
    standard sdist."""
    copy_files()
    sdist.run(self)
def run(self):
    """Sync the artifact repository unless disabled, then build the
    source distribution."""
    should_sync = not self.no_sync_artifacts
    if should_sync:
        sync_artifacts()
    sdist.run(self)