def run(self):
    """Run the standard build, first extending self.packages with the
    NumPy-dependent sub-packages when NumPy is importable."""
    if check_dependencies_once():
        if is_Numpy_installed():
            # Only ship packages that import NumPy when it is present.
            self.packages.extend(NUMPY_PACKAGES)
        build_py.run(self)
def run(self):
    """Standard build, then generate the extra files into the build tree
    (skipped on a dry run)."""
    build_py.run(self)
    if not self.dry_run:
        self.generate_files(self.here, self.build_lib)
def run(self):
    """Compile the gemtools binaries and install the binary bundle into
    the package tree before the regular python build."""
    compile_gemtools()
    compile_gemtools_cnv()
    # FIX: build paths with os.path instead of manual "%s/%s" joining.
    parent_dir = os.path.dirname(os.path.abspath(__file__))
    target_dir = os.path.join(parent_dir, "python", "gem", "gembinaries")
    _install_bundle(target_dir, base=os.path.join(parent_dir, "downloads"))
    _build_py.run(self)
def run(self): try: # Create build date file. This is used to determine if the java jar and python code are # compatible with each other f = open('pyboof/build_date.txt', 'w') f.write(str(int(round(time.time() * 1000)))) f.close() # See if javac is available for compiling the java code if check_for_command("javac"): if call(["bash", "gradlew", "allJar"]) != 0: print "Gradle build failed." exit(1) else: print "javac isn't installed on your systems. exiting now" # TODO Should download instead if possible? exit(1) except Exception as e: print "Exception message:" print str(e) print e.message print if not os.path.isfile('python/pyboof/PyBoof-all.jar'): print "Gradle build failed AND there is no PyBoof-all.jar" print "" print "Did you run build as a regular user first?" print " ./setup.py build" print "Is Gradle and Java installed? Test by typing the following:" print " gradle allJar" exit(1) build_py.run(self)
def run(self):
    """Compile the Stan model into the build tree, then build as usual."""
    if not self.dry_run:
        model_dir = os.path.join(self.build_lib, MODEL_TARGET_DIR)
        self.mkpath(model_dir)
        build_stan_model(model_dir)
    build_py.run(self)
def run(self):
    """Ensure the simulator DLL is in place, stamp the version, build."""
    simulator_dll = Path(QSHARP_PACKAGE_ROOT / "Microsoft.Quantum.Simulation.Simulators.dll")
    if not simulator_dll.exists():
        # Fetch the native simulator binaries via the nuget helper command.
        self.run_command('nuget_copy')
    self.write_version()
    build_py.run(self)
def run(self):
    """Build, then record the git commit hash into the installed
    COMMIT_INFO.txt so the installed tree knows its provenance."""
    import subprocess
    import configparser
    build_py.run(self)
    proc = subprocess.Popen('git rev-parse --short HEAD',
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            shell=True)
    repo_commit, _ = proc.communicate()
    # Fix for python 3
    if PY3:
        repo_commit = repo_commit.decode()
    # We write the installation commit even if it's empty
    if PY3:
        cfg_parser = configparser.RawConfigParser()
    else:
        cfg_parser = configparser.ConfigParser()
    cfg_parser.read(pjoin('nipype', 'COMMIT_INFO.txt'))
    cfg_parser.set('commit hash', 'install_hash', repo_commit.strip())
    out_pth = pjoin(self.build_lib, 'nipype', 'COMMIT_INFO.txt')
    # FIX: the original passed an anonymous open() handle that was never
    # closed; 'with' guarantees the file is flushed and closed.
    mode = 'wt' if PY3 else 'wb'
    with open(out_pth, mode) as out_file:
        cfg_parser.write(out_file)
def run(self, *args):
    """Delegate to the base build, then compile each language's message
    catalog and merge the i18n files."""
    BuildCommand.run(self, *args)
    for language in langs:
        mkmo(language)
    merge_i18n()
def run(self):
    """Run the optional pre-build commands that are available on this
    system, then the regular build."""
    pre_build = []
    if CAN_COPY:
        pre_build.append('submodule_copy')
    if THRIFT is not None:
        pre_build.append('gen_thrift')
    for command in pre_build:
        self.run_command(command)
    _build_py.run(self)
def run(self):
    """Generate pyoracc's parsetab file before the normal build so the
    generated table ships with the package.

    build_py is an old-style class in the Maven Jython plugin setuptools
    version 0.6, so super() cannot be used.
    """
    from pyoracc import _generate_parsetab
    _generate_parsetab()
    build_py.run(self)
def run(self):
    """Build, then refresh the version metadata and git info in the
    build tree (distutils uses old-style classes, so no super())."""
    build_py.run(self)
    # Replace the copied version file with a fresh one.
    destination = os.path.join(self.build_lib, VERSION_METADATA_FILE)
    copyfile(VERSION_METADATA_FILE, destination)
    add_git_metadata(self.build_lib)
def run(self):
    """Build the client requirements and sync the war directory into
    the build tree, then run the regular build."""
    if not self.dry_run:
        _build_requirements()
        war_src = os.path.join('weblabdeusto_data', 'war')
        war_dst = os.path.join(self.build_lib, 'weblabdeusto_data', 'war')
        # copytree refuses to overwrite, so clear any stale copy first.
        if os.path.exists(war_dst):
            shutil.rmtree(war_dst)
        shutil.copytree(war_src, war_dst)
    _build_py.run(self)
def run(self):
    """Compile the protobufs, then run the standard build."""
    # FIX: pass cwd= instead of chdir-ing around the call; the original
    # left the process in proto/ if make raised, and shell=True with a
    # list argument only ran the first element through the shell anyway.
    subprocess.check_call(["make"], cwd="proto")
    build_py.run(self)
def run(self):
    """Generate the static web assets into the build tree, then build."""
    if not self.dry_run:
        static_dir = os.path.join(self.build_lib, 'ctplot/static')
        # Create the per-language output directories before generating.
        for lang_code in ('de', 'en'):
            self.mkpath(os.path.join(static_dir, lang_code))
        generate_static_files(static_dir)
    build_py.run(self)
def run(self): try: assert(0 == subprocess.call(['make', 'bdist-prep'], cwd=here)) except: print "Error building package. Try running 'make'." exit(1) else: build_py.run(self)
def run(self):
    """Warm up the xtuml/bridgepoint parse tables before the build."""
    import xtuml
    from bridgepoint import oal
    loader = xtuml.ModelLoader()
    loader.input('', name='<empty string>')
    loader.build_metamodel()
    # Parsing an empty action body forces oal's parser generation, too.
    oal.parse('')
    build_py.run(self)
def run(self): try: assert(0 == subprocess.call(['make', 'bdist-prep'], cwd=here)) except: print "Error building package. Try running 'make'." exit(1) else: # RMC From class "build_py", get function "run" and execute with parameter "self". build_py.run(self)
def run(self):
    """Overwriting the existing command: run the waf build in respy/
    before the regular python build."""
    original_cwd = os.getcwd()
    os.chdir('respy')
    try:
        # FIX: surface waf failures instead of silently ignoring the
        # exit status of os.system().
        if os.system('./waf distclean; ./waf configure build') != 0:
            raise RuntimeError('waf build failed')
    finally:
        # FIX: restore the cwd even if the build blows up.
        os.chdir(original_cwd)
    build_py.run(self)
def run(self):
    """Build the foris plugin files, then the python package."""
    from foris_plugins_distutils import build
    plugin_cmd = build(copy.copy(self.distribution))
    plugin_cmd.ensure_finalized()
    plugin_cmd.run()
    build_py.run(self)
def run(self):
    """Write the pygments stylesheet into the static directory of the
    build tree, then run the regular build."""
    if not self.dry_run:
        import os.path
        from pygments.formatters import HtmlFormatter
        css_dir = os.path.join(self.build_lib, 'pastetron/static')
        self.mkpath(css_dir)
        formatter = HtmlFormatter(linenos=True, cssclass='highlight')
        css_path = os.path.join(css_dir, 'pygments.css')
        with open(css_path, 'w') as fh:
            fh.write(formatter.get_style_defs('.highlight'))
    build_py.run(self)
def run(self):
    """Compile the django message catalogs, then run the normal build.

    django 1.9 does not need the chdir anymore.
    """
    try:
        os.chdir('registration')
        try:
            # Use the management command to compile messages.
            call_command("compilemessages")
        finally:
            # FIX: restore the cwd even if compilemessages raises; the
            # original left the process inside registration/ on error.
            os.chdir('..')
    except ImportError:
        # django not installed: skip catalog compilation.
        pass
    _build_py.run(self)
def run(self):
    """Patch the Eagle DTD, embed it as a python string module, generate
    Swoop.py, then run the regular build."""
    import GenerateSwoop
    os.system("patch Swoop/eagle-7.2.0.dtd Swoop/eagle.dtd.diff -o Swoop/eagle-swoop.dtd")
    # FIX: context managers close both files; the original never closed
    # the patched-DTD read handle at all.
    with open("Swoop/eagle-swoop.dtd") as patched:
        dtd_text = patched.read()
    with open("Swoop/eagleDTD.py", "w") as dtd:
        dtd.write('DTD="""')
        dtd.write(dtd_text)
        dtd.write('"""')
    GenerateSwoop.main("Swoop/Swoop.py")
    build_py.run(self)
def run(self):
    """Build, then replace _version.py in the build tree with a copy
    that carries the freshly-computed version info."""
    versions = get_versions(verbose=True)
    _build_py.run(self)
    if versionfile_build:
        target = os.path.join(self.build_lib, versionfile_build)
        print("UPDATING %s" % target)
        # Drop the source-tree copy and write the expanded version.
        os.unlink(target)
        with open(target, "w") as f:
            f.write(SHORT_VERSION_PY % versions)
def run(self):
    """Build, then generate modules for each data file and record the
    successfully generated ones alongside the originals."""
    build_py.run(self)
    if self.dry_run:
        return
    print("files = {}".format(self.data_files))
    for package, src_dir, build_dir, files in self.data_files:
        generated = [self.make_module(src_dir, build_dir, f) for f in files]
        # make_module returns None for files it skipped.
        files.extend(nf for nf in generated if nf is not None)
def run(self):
    """Adjust the package list for the current platform, then build."""
    if not check_dependencies_once():
        return
    if is_jython() and "Bio.Restriction" in self.packages:
        # Evil hack to work on Jython 2.7: avoids
        # "java.lang.RuntimeException: Method code too large!" from
        # Bio/Restriction/Restriction_Dictionary.py.
        self.packages.remove("Bio.Restriction")
    if is_Numpy_installed():
        # Add software that requires Numpy to be installed.
        self.packages.extend(NUMPY_PACKAGES)
    build_py.run(self)
def run(self):
    """Build, then write __info__.py into the build tree recording where
    the C headers will be installed."""
    build_py.run(self)
    package_dir = self.get_package_dir('scikits.cuda')
    inst_obj = self.distribution.command_obj['install']
    install_headers_pdir, _ = os.path.split(inst_obj.install_headers)
    # Forward-slash join is deliberate: the value is embedded in python
    # source below.
    self.install_dir = install_headers_pdir + '/scikits/cuda'
    filename = os.path.join(self.build_lib, package_dir, '__info__.py')
    # FIX: 'with' closes the file even if a write raises.
    with open(filename, 'w') as f:
        f.write('# Installation location of C headers:\n')
        f.write('install_headers = "%s"\n' % self.install_dir)
def run(self):
    """Versioneer hook: build, then rewrite _version.py in the build
    tree with the freshly-computed version info."""
    cfg = get_config_from_root(get_root())
    versions = get_versions()
    _build_py.run(self)
    if cfg.versionfile_build:
        target = os.path.join(self.build_lib, cfg.versionfile_build)
        print("UPDATING %s" % target)
        write_to_version_file(target, versions)
def run(self):
    """Regenerate the grako parser from the ebnf grammar, then build."""
    script_dir = os.path.dirname(inspect.getfile(CustomBuildPy))
    command = "{} -m grako -m Cubicle -o {} {}".format(
        sys.executable,
        os.path.join(script_dir, "ctc", "parser.py"),
        os.path.join(script_dir, "ctc", "cubicle.ebnf"))
    print(command, file=sys.stderr)
    if os.system(command) != 0:
        raise Exception("parser generation failed")
    build_py.run(self)
def run(self):
    """
    During building, adds a variable with the complete version (from
    git describe) to forgi/__init__.py.
    """
    outfile = self.get_module_outfile(self.build_lib, ["forgi"], "__init__")
    # Delete any stale copy so _build_py re-copies the pristine source.
    # FIX: catch only OSError instead of a bare 'except', which also
    # swallowed KeyboardInterrupt and SystemExit.
    try:
        os.remove(outfile)
    except OSError:
        pass
    # Superclass build.
    _build_py.run(self)
    # Append the version number to __init__.py.
    with open(outfile, "a") as of:
        of.write('\n__complete_version__ = "{}"'.format(forgi_version))
def run(self):
    """Expand every *.py.in template with values from the distribution's
    get_* hooks, then run the regular build."""
    # FIX: .items() works on both Python 2 and 3 (iteritems is py2-only).
    options = {name[4:]: method()
               for name, method in self.distribution.__dict__.items()
               if name.startswith('get_')}
    if self.packages:
        for package in self.packages:
            package_dir = self.get_package_dir(package)
            module_files = glob(os.path.join(package_dir, "*.py.in"))
            for module_file in module_files:
                # FIX: the original rebound 'package' to a list here, so a
                # second template in the same package crashed on .split().
                package_parts = package.split('.')
                module = os.path.basename(module_file).split('.')[0]
                outfile = self.get_module_outfile(self.build_lib,
                                                  package_parts, module)
                self.mkpath(os.path.dirname(outfile))
                with open(module_file, 'r') as input_file:
                    with open(outfile, 'w') as output_file:
                        output_file.write(input_file.read().format(**options))
    build_py.run(self)
def run(self):
    """Run the base build, then recreate library symlinks in the build tree."""
    BuildPy.run(self)
    _maintain_symlinks('library', self.build_lib)
def run(self):
    """Compile the message catalogs before the regular build.

    build_py is an old-style class, so super() is unavailable.
    """
    self.run_command('compile_catalog')
    build_py.run(self)
def run(self):
    """Build the bundled native library (True flag — semantics defined
    by make_library) before the python build."""
    make_library(True)
    build_py.run(self)
def run(self):
    """Best-effort contract compilation, then the regular build."""
    try:
        self.run_command('compile_contracts')
    except SystemExit:
        # Continue the build even if the compile command bails out.
        pass
    build_py.run(self)
def run(self):
    """Compile contracts, ensure smoketest_config.json is generated,
    then run the regular build."""
    self.run_command('compile_contracts')
    from raiden.tests.utils.smoketest import load_or_create_smoketest_config
    load_or_create_smoketest_config()
    build_py.run(self)
def run(self):
    """Compile the message catalogs first, then build.

    Can't use super here as in Py2 build_py isn't a new-style class.
    """
    self.run_command("compile_catalog")
    build_py.run(self)
def run(self):
    """Build the extension modules before the pure-python build."""
    self.run_command("build_ext")
    build_py.run(self)
def run(self):
    """Run the code generator, then the regular build.

    The generator is imported lazily, after setuptools has downloaded
    the dependencies it needs.
    """
    from gen import run as generate_sources
    generate_sources()
    return build_py.run(self)
def run(self):
    """Bake the revision into the build tree (unless dry-run), then build."""
    if not self.dry_run:
        write_baked_revision(self.build_lib)
    build_py.run(self)
def run(self):
    """Generate protobuf and gRPC sources, then run the regular build."""
    for step in ("compile_pb", "copy_grpc"):
        self.run_command(step)
    build_py.run(self)
def run(self):
    """Fetch the CSPICE toolkit, then run the regular build."""
    InstallCSpice.get_cspice()
    build_py.run(self)
def run(self):
    """Generate the package content first, then build.

    distutils uses old-style classes, so no super().
    """
    generate_content()
    build_py.run(self)
def run(self):
    """Compile the CCL sources (honouring the distribution's debug
    flag), then run the base build."""
    _compile_ccl(debug=self.distribution.debug)
    _build.run(self)
def run(self):
    """Generate the TFMA protobuf modules, then build.

    _build_py is an old-style class, so super() doesn't work.
    """
    generate_tfma_protos()
    _build_py.run(self)
def run(self):
    """Compile the message catalogs, then build (old-style class, so
    the base is called directly)."""
    self.run_command('compile_catalog')
    build_py.run(self)
def run(self):
    """Build the native artifacts first, then the python package."""
    self.run_command('build_native')
    build_py.run(self)
def run(self):
    """Run the build, but only when the dependency check passes."""
    if check_dependencies_once():
        build_py.run(self)
def run(self):
    """Build the bundled native library (False flag — semantics defined
    by make_library) before the python build."""
    make_library(False)
    build_py.run(self)
def run(self, *args, **kwargs):
    """Regular build done by the parent class, then copy the benchmark
    package into the build tree (honouring dry-run)."""
    build_py.run(self, *args, **kwargs)
    if self.dry_run:
        return
    self.copy_pkg(os.path.join('numpy_benchmarks', 'benchmarks'))
def run(self):
    """Generate the info and compatibility modules, then build."""
    make_info_module(packname, version)
    self.make_compat_modules()
    build_py.run(self)
def run(self):
    """Convert the RNC schema files first, then run the regular build."""
    convert_rnc()
    build_py.run(self)
def run(self):
    """Generate the protobuf modules before the pure-python build."""
    self.run_command('generate_protos')
    build_py.run(self)
def run(self):
    """Run the proto build step first, then the regular build."""
    self.run_command('build_proto')
    build_py.run(self)
def run(self):
    """Pin the version (via cmd.execute, which honours dry-run and
    logging), then run the base build."""
    self.execute(self.pin_version, ())
    _build_py.run(self)
def run(self):
    """Run every registered pre-build command, then the normal build."""
    for dependency in build_dependencies:
        self.run_command(dependency)
    build_py.run(self)
def run(self):
    """Regenerate the parser before running the regular build."""
    _generate_parser()
    build_py.run(self)
def run(self):
    """Run build_ext first so that SWIG generated files are included,
    then delegate to the regular build."""
    self.run_command('build_ext')
    return build_py.run(self)
def run(self):
    """Build, then stamp the version into the built package's __init__
    and version file."""
    build_py.run(self)
    meta = self.distribution.metadata
    package_dir = join(self.build_lib, meta.name)
    write_version_into_init(package_dir, meta.version)
    write_version_file(package_dir, meta.version)
def run(self):
    """Regenerate the version module, then run the regular build."""
    write_version_py(version_py)
    build_py.run(self)
def run(self):
    """Generate sources via 'make gensrc' (raising on failure), then
    run the standard build."""
    subprocess.check_call(["make", "gensrc"])
    build_py.run(self)